nedo-vision-worker-core 0.3.0-py3-none-any.whl → 0.3.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of nedo-vision-worker-core might be problematic.

nedo_vision_worker_core/streams/VideoStreamManager.py

@@ -1,273 +1,302 @@
  import logging
  import time
+ import threading
+ import cv2
+ from typing import Any, Dict, Optional
+
  from .VideoStream import VideoStream
  from .SharedVideoDeviceManager import SharedVideoDeviceManager
- import threading
+

  class VideoStreamManager:
- """Manages multiple video streams dynamically using VideoStream threads."""
+ """Manages multiple video streams (files/RTSP) and direct devices (/dev/videoN or index) safely."""

  def __init__(self):
- self.streams = {} # Store streams as {worker_source_id: VideoStream}
- self.running = False
- self.lock = threading.Lock() # Add thread lock
+ # Regular streams: {worker_source_id: VideoStream}
+ self.streams: Dict[Any, VideoStream] = {}
+ # Direct device streams: {worker_source_id: {'url':..., 'latest_frame':..., 'last_update':..., 'alive': bool}}
+ self.direct_device_streams: Dict[Any, Dict[str, Any]] = {}
+ # Per-direct-device locks: {worker_source_id: threading.Lock}
+ self.direct_device_locks: Dict[Any, threading.Lock] = {}
+
  self.shared_device_manager = SharedVideoDeviceManager()
- self.direct_device_streams = {} # Store direct device streams {worker_source_id: latest_frame}
- self.direct_device_locks = {} # Store locks for direct device frame access

+ self._lock = threading.RLock()
+ self._running_evt = threading.Event() # safer than a bare bool
+
+ # -----------------------
+ # Helpers / classification
+ # -----------------------
  def _is_direct_device(self, url) -> bool:
  """Check if URL represents a direct video device."""
+ if isinstance(url, int):
+ return True
  if isinstance(url, str):
- return url.isdigit() or url.startswith('/dev/video')
- return isinstance(url, int)
+ return url.isdigit() or url.startswith("/dev/video")
+ return False

+ # -----------------------
+ # Public API
+ # -----------------------
  def add_stream(self, worker_source_id, url):
- """Adds a new video stream if it's not already active."""
- if worker_source_id not in self.streams and worker_source_id not in self.direct_device_streams:
- # Check if this is a direct video device
- if self._is_direct_device(url):
- self._add_direct_device_stream(worker_source_id, url)
- else:
- # Regular stream (file, RTSP, etc.)
- stream = VideoStream(url)
- stream.start() # Start the thread
- self.streams[worker_source_id] = stream
- logging.info(f"✅ Added and started video stream: {worker_source_id}")
- else:
- logging.warning(f"⚠️ Stream {worker_source_id} is already active.")
+ """Adds and starts a stream (regular file/RTSP or a shared direct device) if not already present."""
+ with self._lock:
+ if worker_source_id in self.streams or worker_source_id in self.direct_device_streams:
+ logging.warning("⚠️ Stream %s is already active.", worker_source_id)
+ return

- def _add_direct_device_stream(self, worker_source_id, url):
- """Add a direct device stream using the shared device manager."""
+ if self._is_direct_device(url):
+ self._add_direct_device_stream(worker_source_id, url)
+ return
+
+ # Regular stream
+ stream = VideoStream(url)
  try:
- # Initialize frame storage for this stream
- self.direct_device_streams[worker_source_id] = {
- 'url': url,
- 'latest_frame': None,
- 'last_update': time.time()
- }
- self.direct_device_locks[worker_source_id] = threading.Lock()
-
- # Create callback for receiving frames
- def frame_callback(frame):
- with self.direct_device_locks[worker_source_id]:
- self.direct_device_streams[worker_source_id]['latest_frame'] = frame
- self.direct_device_streams[worker_source_id]['last_update'] = time.time()
-
- # Subscribe to the shared device
- success = self.shared_device_manager.subscribe_to_device(
- source=url,
- subscriber_id=f"stream_{worker_source_id}",
- callback=frame_callback
- )
-
- if success:
- logging.info(f"✅ Added direct device stream: {worker_source_id} -> {url}")
- else:
- # Clean up on failure
- if worker_source_id in self.direct_device_streams:
- del self.direct_device_streams[worker_source_id]
- if worker_source_id in self.direct_device_locks:
- del self.direct_device_locks[worker_source_id]
- logging.error(f"❌ Failed to add direct device stream: {worker_source_id}")
-
+ stream.start() # start thread
+ with self._lock:
+ self.streams[worker_source_id] = stream
+ logging.info("✅ Added and started video stream: %s", worker_source_id)
  except Exception as e:
- logging.error(f"❌ Error adding direct device stream {worker_source_id}: {e}")
- # Clean up on error
- if worker_source_id in self.direct_device_streams:
- del self.direct_device_streams[worker_source_id]
- if worker_source_id in self.direct_device_locks:
- del self.direct_device_locks[worker_source_id]
+ logging.error("❌ Failed to start regular stream %s: %s", worker_source_id, e)

  def remove_stream(self, worker_source_id):
- """Removes and stops a video stream."""
+ """Stops and removes a stream (regular or direct device)."""
  if not worker_source_id:
  return

- with self.lock:
- # Check if it's a direct device stream
- if worker_source_id in self.direct_device_streams:
- self._remove_direct_device_stream(worker_source_id)
- return
-
- # Check if it's a regular stream
- if worker_source_id not in self.streams:
- logging.warning(f"⚠️ Stream {worker_source_id} not found in manager.")
- return
+ # Direct device?
+ with self._lock:
+ is_direct = worker_source_id in self.direct_device_streams

- logging.info(f"🛑 Removing video stream: {worker_source_id}")
+ if is_direct:
+ self._remove_direct_device_stream(worker_source_id)
+ return

- # Get reference before removing from dict
+ # Regular stream
+ with self._lock:
  stream = self.streams.pop(worker_source_id, None)

- if stream:
- try:
- stream.stop()
-
- except Exception as e:
- logging.error(f"❌ Error stopping stream {worker_source_id}: {e}")
- finally:
- stream = None # Ensure cleanup
-
- logging.info(f"✅ Stream {worker_source_id} removed successfully.")
+ if stream is None:
+ logging.warning("⚠️ Stream %s not found in manager.", worker_source_id)
+ return

- def _remove_direct_device_stream(self, worker_source_id):
- """Remove a direct device stream from the shared device manager."""
+ logging.info("🛑 Removing video stream: %s", worker_source_id)
  try:
- device_info = self.direct_device_streams.get(worker_source_id)
- if device_info:
- url = device_info['url']
-
- # Unsubscribe from the shared device
- success = self.shared_device_manager.unsubscribe_from_device(
- source=url,
- subscriber_id=f"stream_{worker_source_id}"
- )
-
- if success:
- logging.info(f"✅ Removed direct device stream: {worker_source_id}")
- else:
- logging.warning(f"⚠️ Failed to unsubscribe direct device stream: {worker_source_id}")
-
- # Clean up local storage
- if worker_source_id in self.direct_device_streams:
- del self.direct_device_streams[worker_source_id]
- if worker_source_id in self.direct_device_locks:
- del self.direct_device_locks[worker_source_id]
-
+ # Expectation: VideoStream.stop() should signal and join internally.
+ stream.stop()
  except Exception as e:
- logging.error(f"❌ Error removing direct device stream {worker_source_id}: {e}")
+ logging.error("❌ Error stopping stream %s: %s", worker_source_id, e)
+ finally:
+ stream = None
+
+ logging.info("✅ Stream %s removed successfully.", worker_source_id)

  def start_all(self):
- """Starts all video streams."""
+ """Starts all regular streams that are not alive. (Direct devices are publisher-driven.)"""
  logging.info("🔄 Starting all video streams...")
- for stream in self.streams.values():
- if not stream.is_alive():
- stream.start() # Start thread if not already running
- self.running = True
+ with self._lock:
+ for stream in self.streams.values():
+ if not stream.is_alive():
+ try:
+ stream.start()
+ except Exception as e:
+ logging.error("❌ Failed to start a stream: %s", e)
+ self._running_evt.set()
+
  def stop_all(self):
- """Stops all video streams."""
+ """Stops all streams (regular + direct devices)."""
  logging.info("🛑 Stopping all video streams...")
-
- with self.lock:
- # Get a list of IDs to avoid modification during iteration
- stream_ids = list(self.streams.keys())
- direct_stream_ids = list(self.direct_device_streams.keys())
-
- # Stop each regular stream
- for worker_source_id in stream_ids:
+
+ # Snapshot IDs to avoid dict-size-change races
+ with self._lock:
+ regular_ids = list(self.streams.keys())
+ direct_ids = list(self.direct_device_streams.keys())
+
+ for wid in regular_ids:
  try:
- self.remove_stream(worker_source_id)
+ self.remove_stream(wid)
  except Exception as e:
- logging.error(f"Error stopping stream {worker_source_id}: {e}")
-
- # Stop each direct device stream
- for worker_source_id in direct_stream_ids:
+ logging.error("Error stopping regular stream %s: %s", wid, e)
+
+ for wid in direct_ids:
  try:
- self.remove_stream(worker_source_id)
+ self.remove_stream(wid)
  except Exception as e:
- logging.error(f"Error stopping direct device stream {worker_source_id}: {e}")
-
- self.running = False
+ logging.error("Error stopping direct device stream %s: %s", wid, e)
+
+ self._running_evt.clear()

  def get_frame(self, worker_source_id):
- """Retrieves the latest frame for a specific stream."""
- # Check if it's a direct device stream first
+ """Returns the latest frame for the stream, or None if not available.
+ Non-blocking. No sleeps. Short lock scopes.
+ """
+ # Direct device path
+ with self._lock:
+ if worker_source_id in self.direct_device_streams:
+ # fall through to direct getter outside the manager lock
+ pass
+ else:
+ # Regular stream path
+ stream = self.streams.get(worker_source_id)
+
+ # Direct device?
  if worker_source_id in self.direct_device_streams:
  return self._get_direct_device_frame(worker_source_id)
-
- # Handle regular streams
- with self.lock: # Add lock protection for stream access
- stream = self.streams.get(worker_source_id)
- if stream is None:
- return None

- # Check if stream is still running
- if not stream.running:
- return None
-
- try:
- # **Ignore warnings for the first 5 seconds**
- elapsed_time = time.time() - stream.start_time
- if elapsed_time < 5:
- return None
-
- # Check if video file has ended
- if stream.is_file and stream.is_video_ended():
- logging.debug(f"Video file {worker_source_id} has ended, waiting for restart...")
- # Small delay to allow the video to restart
- time.sleep(0.1)
- return None
+ # Regular stream
+ if stream is None or not getattr(stream, "running", False):
+ return None

- return stream.get_frame() # Already returns a copy
- except Exception as e:
- logging.error(f"Error getting frame from stream {worker_source_id}: {e}")
+ try:
+ # Soft warm-up: your original code suppressed frames for the first 5s
+ start_time = getattr(stream, "start_time", None)
+ if start_time is not None and (time.time() - start_time) < 5.0:
  return None

- def _get_direct_device_frame(self, worker_source_id):
- """Get the latest frame from a direct device stream."""
- try:
- if worker_source_id not in self.direct_device_locks:
+ # If it's a file and ended, do not sleep here; let the producer handle restarts.
+ if getattr(stream, "is_file", False) and stream.is_video_ended():
+ logging.debug("Video file %s ended; waiting for producer to restart.", worker_source_id)
  return None
-
- with self.direct_device_locks[worker_source_id]:
- device_info = self.direct_device_streams.get(worker_source_id)
- if not device_info:
- return None
-
- frame = device_info.get('latest_frame')
- last_update = device_info.get('last_update', 0)
-
- # Check if frame is too old (5 seconds threshold)
- if time.time() - last_update > 5.0:
- return None
-
- return frame.copy() if frame is not None else None
-
+
+ # Must return a copy (VideoStream.get_frame() expected to handle copying)
+ return stream.get_frame()
  except Exception as e:
- logging.error(f"Error getting frame from direct device stream {worker_source_id}: {e}")
+ logging.error("Error getting frame from stream %s: %s", worker_source_id, e)
  return None

  def get_active_stream_ids(self):
- """Returns a list of active stream IDs."""
- regular_streams = list(self.streams.keys())
- direct_streams = list(self.direct_device_streams.keys())
- return regular_streams + direct_streams
-
- def get_stream_url(self, worker_source_id):
- """Returns the URL of a specific stream."""
- # Check direct device streams first
- if worker_source_id in self.direct_device_streams:
- return self.direct_device_streams[worker_source_id]['url']
-
- # Check regular streams
- stream = self.streams.get(worker_source_id)
- return stream.source if stream else None
-
- def has_stream(self, worker_source_id):
+ """Returns a snapshot of active stream IDs (regular + direct)."""
+ with self._lock:
+ return list(self.streams.keys()) + list(self.direct_device_streams.keys())
+
+ def get_stream_url(self, worker_source_id) -> Optional[str]:
+ """Returns the URL/source of a specific stream."""
+ with self._lock:
+ if worker_source_id in self.direct_device_streams:
+ return self.direct_device_streams[worker_source_id]["url"]
+ s = self.streams.get(worker_source_id)
+ return s.source if s else None
+
+ def has_stream(self, worker_source_id) -> bool:
  """Checks if a stream is active."""
- return (worker_source_id in self.streams or
- worker_source_id in self.direct_device_streams)
+ with self._lock:
+ return (worker_source_id in self.streams) or (worker_source_id in self.direct_device_streams)

- def is_running(self):
- """Checks if the manager is running."""
- return self.running
+ def is_running(self) -> bool:
+ """Checks if manager is 'running'."""
+ return self._running_evt.is_set()

- def is_video_file(self, worker_source_id):
- """Check if a stream is a video file."""
- # Direct device streams are never video files
- if worker_source_id in self.direct_device_streams:
- return False
-
- # Check regular streams
- stream = self.streams.get(worker_source_id)
- return stream.is_file if stream else False
+ def is_video_file(self, worker_source_id) -> bool:
+ """True if a stream is a file. Direct devices are never files."""
+ with self._lock:
+ if worker_source_id in self.direct_device_streams:
+ return False
+ s = self.streams.get(worker_source_id)
+ return bool(getattr(s, "is_file", False)) if s else False

  def get_device_sharing_info(self):
- """Get information about device sharing."""
+ """Returns info from the shared device manager."""
  return self.shared_device_manager.get_all_devices_info()

  def shutdown(self):
- """Shutdown the manager and clean up all resources."""
+ """Cleanly stop all and leave the shared manager to auto-clean."""
  logging.info("Shutting down VideoStreamManager")
  self.stop_all()
- # The SharedVideoDeviceManager is a singleton and will clean up automatically
+
+ # -----------------------
+ # Direct device management
+ # -----------------------
+ def _add_direct_device_stream(self, worker_source_id, url):
+ """Subscribe to a shared device and store frames safely, handling removal races."""
+ lock = threading.Lock()
+
+ with self._lock:
+ # Initialize storage
+ self.direct_device_locks[worker_source_id] = lock
+ self.direct_device_streams[worker_source_id] = {
+ "url": url,
+ "latest_frame": None,
+ "last_update": 0.0,
+ "alive": True, # tombstone flag
+ }
+
+ # Callback uses captured lock and checks the alive flag to avoid races
+ def frame_callback(frame):
+ # Use the per-worker lock we captured (not via dict lookup)
+ with lock:
+ with self._lock:
+ info = self.direct_device_streams.get(worker_source_id)
+ if not info or not info.get("alive", False):
+ return # dropped subscriber; ignore late frames
+ info["latest_frame"] = frame
+ info["last_update"] = time.time()
+
+ try:
+ success = self.shared_device_manager.subscribe_to_device(
+ source=url,
+ subscriber_id=f"stream_{worker_source_id}",
+ callback=frame_callback,
+ )
+ if success:
+ logging.info("✅ Added direct device stream: %s -> %s", worker_source_id, url)
+ else:
+ logging.error("❌ Failed to add direct device stream: %s", worker_source_id)
+ # rollback
+ with self._lock:
+ self.direct_device_streams.pop(worker_source_id, None)
+ self.direct_device_locks.pop(worker_source_id, None)
+ except Exception as e:
+ logging.error("❌ Error adding direct device stream %s: %s", worker_source_id, e)
+ with self._lock:
+ self.direct_device_streams.pop(worker_source_id, None)
+ self.direct_device_locks.pop(worker_source_id, None)
+
+ def _remove_direct_device_stream(self, worker_source_id):
+ """Unsubscribe and safely tear down direct device stream, tolerating late callbacks."""
+ # Mark as dead first so any in-flight callbacks become no-ops
+ with self._lock:
+ info = self.direct_device_streams.get(worker_source_id)
+ if not info:
+ logging.warning("⚠️ Direct device stream %s not found.", worker_source_id)
+ return
+ info["alive"] = False
+ url = info["url"]
+
+ try:
+ success = self.shared_device_manager.unsubscribe_from_device(
+ source=url,
+ subscriber_id=f"stream_{worker_source_id}",
+ )
+ if success:
+ logging.info("✅ Removed direct device stream: %s", worker_source_id)
+ else:
+ logging.warning("⚠️ Unsubscribe reported failure for direct device stream: %s", worker_source_id)
+ except Exception as e:
+ logging.error("❌ Error unsubscribing direct device stream %s: %s", worker_source_id, e)
+
+ # Now it is safe to drop references
+ with self._lock:
+ self.direct_device_streams.pop(worker_source_id, None)
+ self.direct_device_locks.pop(worker_source_id, None)
+
+ def _get_direct_device_frame(self, worker_source_id):
+ """Return last frame from a direct device if fresh, else None."""
+ with self._lock:
+ lock = self.direct_device_locks.get(worker_source_id)
+
+ if lock is None:
+ return None
+
+ # Serialize per-stream frame access
+ with lock:
+ with self._lock:
+ info = self.direct_device_streams.get(worker_source_id)
+ if not info:
+ return None
+ frame = info.get("latest_frame")
+ last_update = info.get("last_update", 0.0)
+
+ # Outside manager lock: only local refs used now
+ if (time.time() - last_update) > 5.0:
+ return None
+ return frame.copy() if frame is not None else None
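
For orientation, here is a minimal usage sketch of the refactored manager shown in the hunk above. It is not part of the package diff: it assumes the 0.3.2 wheel is installed, uses a hypothetical worker-source id and RTSP URL, and only calls methods that appear in the new code (`add_stream`, `get_frame`, `remove_stream`, `shutdown`).

```python
import time

# Import path follows the wheel layout listed in RECORD below.
from nedo_vision_worker_core.streams.VideoStreamManager import VideoStreamManager

manager = VideoStreamManager()
manager.add_stream("cam-1", "rtsp://example.com/cam1")  # regular stream (hypothetical URL)
manager.add_stream("cam-2", "/dev/video0")              # direct device, shared via SharedVideoDeviceManager

try:
    for _ in range(100):
        # Returns None during the ~5 s warm-up, if the frame is stale, or if the stream is unknown.
        frame = manager.get_frame("cam-1")
        if frame is not None:
            print("frame received:", getattr(frame, "shape", None))  # frames are expected to be numpy arrays
        time.sleep(0.1)
finally:
    manager.remove_stream("cam-1")
    manager.shutdown()  # stops all remaining streams, including direct-device subscriptions
```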
nedo_vision_worker_core-0.3.0.dist-info/METADATA → nedo_vision_worker_core-0.3.2.dist-info/METADATA

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nedo-vision-worker-core
- Version: 0.3.0
+ Version: 0.3.2
  Summary: Nedo Vision Worker Core Library for AI Vision Processing
  Author-email: Willy Achmat Fauzi <willy.achmat@gmail.com>
  Maintainer-email: Willy Achmat Fauzi <willy.achmat@gmail.com>

@@ -391,40 +391,7 @@ def cleanup_monitoring():
  CoreService.unregister_callback("area_security")
  ```

- ### Dependencies
-
- The service relies on several key technologies:
-
- - **PyTorch**: Deep learning framework with CUDA support
- - **OpenCV**: Computer vision and video processing
- - **SQLAlchemy**: Database ORM and management
- - **FFmpeg**: Video streaming and processing
- - **Ultralytics**: YOLO model implementations
-
- ## Development Setup
-
- For development and testing:
-
- ```bash
- # Clone and setup development environment
- git clone <repository-url>
- cd nedo-vision-worker-core-v2
-
- # Create virtual environment
- python -m venv .venv
- source .venv/bin/activate # Linux/Mac
- # or
- .venv\Scripts\activate # Windows
-
- # Install in development mode
- pip install -e .
-
- # Run tests
- python test.py
-
- # Check system health
- nedo-worker-core doctor
- ```
+ See [INSTALL.md](INSTALL.md) for detailed installation instructions.

  ## Troubleshooting

nedo_vision_worker_core-0.3.0.dist-info/RECORD → nedo_vision_worker_core-0.3.2.dist-info/RECORD

@@ -1,7 +1,7 @@
- nedo_vision_worker_core/__init__.py,sha256=MspK_UZHm3z6OP_43yGsiLrYl8wW9NGCTgVwIUXPEx0,1924
- nedo_vision_worker_core/cli.py,sha256=-33biYVUbgJXdpjf_DL-pmEG-tnLQJWwOWZVhSTCKnE,7623
+ nedo_vision_worker_core/__init__.py,sha256=gOVK9WXdcVvRyelxasgOBdnkqE2wbdbRZ3VnuSXgobc,1924
+ nedo_vision_worker_core/cli.py,sha256=8YuKWsIgICUYXE_QtwyU3WzGhVjTWiAo5uzpFOmjNc8,5766
  nedo_vision_worker_core/core_service.py,sha256=dnHNjbslOeyeWqHDFnk_yKdfTICYzLyRIcuZNwF0Zf4,11323
- nedo_vision_worker_core/doctor.py,sha256=JQ14WDxxA-ABrx7XQ7vc8QpcZztMvCwqf0UGLlQRxaQ,10650
+ nedo_vision_worker_core/doctor.py,sha256=K_-hVV2-mdEefZ4Cfu5hMCiOxBiI1aXY8VtkkpK80Lc,10651
  nedo_vision_worker_core/ai/FrameDrawer.py,sha256=lj83WFaE70BQfkEc6AHcMBXaiEm8l3s_zJZG9C0NkAs,5286
  nedo_vision_worker_core/ai/ImageDebugger.py,sha256=5FwgNGZrxO2eT7hxdxp7N2gQ0oyyYDZChJ3PJapKu-w,4612
  nedo_vision_worker_core/ai/VideoDebugger.py,sha256=M6XVuK2Lq2ceE5UdYj2GLaMbEU6THmGzgQlVkqs-lAc,2578

@@ -61,7 +61,7 @@ nedo_vision_worker_core/models/worker_source_pipeline_detection.py,sha256=p6CJsi
  nedo_vision_worker_core/pipeline/PipelineConfigManager.py,sha256=X55i9GyXcW9ylO6cj2UMAZFSxxPViacL4H4DZl60CAY,1157
  nedo_vision_worker_core/pipeline/PipelineManager.py,sha256=kJslTS1QD7sgmvZ4ZShxW2HI2u_xzmNM5yD0KlpPx_4,5485
  nedo_vision_worker_core/pipeline/PipelinePrepocessor.py,sha256=cCiVSHHqsKCtKYURdYoEjHJX2GnT6zd8kQ6ZukjQ3V0,1271
- nedo_vision_worker_core/pipeline/PipelineProcessor.py,sha256=R40T_lNjRL4up94R57B1P2A1aiI9zwsknaCa0-pDF0w,16045
+ nedo_vision_worker_core/pipeline/PipelineProcessor.py,sha256=Qw8gRk7cOuVBAvS2h12CF0VLkqLrCrIOIGqGQHnzrKk,26262
  nedo_vision_worker_core/pipeline/PipelineSyncThread.py,sha256=9eVGsm4rUfr5tpDHRYOshpiaZ5I484AjJdilhffHzls,11692
  nedo_vision_worker_core/pipeline/__init__.py,sha256=Nqnn8clbgv-5l0PgxcTOldg8mkMKrFn4TvPL-rYUUGg,1
  nedo_vision_worker_core/preprocessing/ImageResizer.py,sha256=RvOazxe6dJQuiy0ZH4lIGbdFfiu0FLUVCHoMvxkDNT4,1324

@@ -79,11 +79,11 @@ nedo_vision_worker_core/repositories/__init__.py,sha256=Nqnn8clbgv-5l0PgxcTOldg8
  nedo_vision_worker_core/services/SharedVideoStreamServer.py,sha256=rhCineMKPG3GQbrMHlSHP4xhXaGZ6Rn1oqIajW5xpaY,9827
  nedo_vision_worker_core/services/VideoSharingDaemon.py,sha256=iY6afEKTOsphfHvmZTL0grezka2DS9DDq-1EIpVMy0Y,28524
  nedo_vision_worker_core/services/VideoSharingDaemonManager.py,sha256=sc8VZo5iwoOdR8uTiel5BKz6-eZ7wwLy3IwV_3tsAu0,10340
- nedo_vision_worker_core/streams/RTMPStreamer.py,sha256=GuvSaY8Siuswrwww0L1yRrOg8N1yITjRPJWy2Vf_Vzs,5821
+ nedo_vision_worker_core/streams/RTMPStreamer.py,sha256=GNooWE4V--GiqetqpWSj0TTmOns12DaIpLGy1qHFulc,10167
  nedo_vision_worker_core/streams/SharedVideoDeviceManager.py,sha256=N2cvlKfemD3OG4zEW5mnyup4JgqbzS_JvP2rbbHEBE0,16183
  nedo_vision_worker_core/streams/StreamSyncThread.py,sha256=WmYAY9wFiFhLlxGdnvKGIjAqLwCBayNKdmAWzkbU0jM,3763
- nedo_vision_worker_core/streams/VideoStream.py,sha256=QZOq9yrdzJXuge8JuvVXvZ8fi8oLQvXcjXX8lrsvr6s,10230
- nedo_vision_worker_core/streams/VideoStreamManager.py,sha256=JsZsyDTaCQCifUXYZAPj_tQlLfN28EzUZDbf8AMvNOk,11636
+ nedo_vision_worker_core/streams/VideoStream.py,sha256=Mtj5FI4vEy-dhJN2hQZaD9D_OWBbsntVqWMSDfKn4wk,16024
+ nedo_vision_worker_core/streams/VideoStreamManager.py,sha256=FpTNRS8F1LvLLYBMwwTeaiOPTz5yyX_HDYtcYJ97utU,12269
  nedo_vision_worker_core/streams/__init__.py,sha256=Nqnn8clbgv-5l0PgxcTOldg8mkMKrFn4TvPL-rYUUGg,1
  nedo_vision_worker_core/tracker/SFSORT.py,sha256=0kggw0l4yPZ55AKHdqVX6mu9ehHmJed7jcJ3JQoC4sk,14061
  nedo_vision_worker_core/tracker/TrackerManager.py,sha256=xtDMI657W2s7HM2lMGtwU0x5Hq74BZpLHd-5xk-278I,6152

@@ -95,8 +95,8 @@ nedo_vision_worker_core/util/PersonAttributeMatcher.py,sha256=PhYTPYSF62Nfuc7dag
  nedo_vision_worker_core/util/PersonRestrictedAreaMatcher.py,sha256=iuzCU32BQKaZ3dIy0QHNg2yoWJA-XhTRwwYqCvFdDgg,1711
  nedo_vision_worker_core/util/TablePrinter.py,sha256=wzLGgb1GFMeIbAP6HmKcZD33j4D-IlyqlyeR7C5yD7w,1137
  nedo_vision_worker_core/util/__init__.py,sha256=Nqnn8clbgv-5l0PgxcTOldg8mkMKrFn4TvPL-rYUUGg,1
- nedo_vision_worker_core-0.3.0.dist-info/METADATA,sha256=R5Z_J3h1UkVOwd9y7ivvWEBCX7F4uhxzxwmGQ_UVI-Q,15032
- nedo_vision_worker_core-0.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- nedo_vision_worker_core-0.3.0.dist-info/entry_points.txt,sha256=pIPafsvPnBw-fpBKBmc1NQCQ6PQY3ad8mZ6mn8_p5FI,70
- nedo_vision_worker_core-0.3.0.dist-info/top_level.txt,sha256=y8kusXjVYqtG8MSHYWTrk8bRrvjOrphKXYyzu943TTQ,24
- nedo_vision_worker_core-0.3.0.dist-info/RECORD,,
+ nedo_vision_worker_core-0.3.2.dist-info/METADATA,sha256=ql_lATGf7e9bMoC80qbVUEqTq6IbqUAt7fk62kfcZEI,14370
+ nedo_vision_worker_core-0.3.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ nedo_vision_worker_core-0.3.2.dist-info/entry_points.txt,sha256=pIPafsvPnBw-fpBKBmc1NQCQ6PQY3ad8mZ6mn8_p5FI,70
+ nedo_vision_worker_core-0.3.2.dist-info/top_level.txt,sha256=y8kusXjVYqtG8MSHYWTrk8bRrvjOrphKXYyzu943TTQ,24
+ nedo_vision_worker_core-0.3.2.dist-info/RECORD,,