nedo-vision-worker-core 0.3.7__tar.gz → 0.3.9__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of nedo-vision-worker-core might be problematic.

Files changed (115)
  1. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/PKG-INFO +4 -4
  2. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/__init__.py +1 -1
  3. nedo_vision_worker_core-0.3.9/nedo_vision_worker_core/ai/VideoDebugger.py +89 -0
  4. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/core_service.py +8 -2
  5. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/detection/RFDETRDetector.py +37 -3
  6. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/pipeline/ModelManager.py +40 -5
  7. nedo_vision_worker_core-0.3.9/nedo_vision_worker_core/pipeline/PipelineManager.py +184 -0
  8. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/pipeline/PipelineProcessor.py +141 -29
  9. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/pipeline/PipelineSyncThread.py +1 -1
  10. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core.egg-info/PKG-INFO +4 -4
  11. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core.egg-info/requires.txt +2 -2
  12. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/pyproject.toml +4 -5
  13. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/requirements.txt +0 -1
  14. nedo_vision_worker_core-0.3.7/nedo_vision_worker_core/ai/VideoDebugger.py +0 -69
  15. nedo_vision_worker_core-0.3.7/nedo_vision_worker_core/pipeline/PipelineManager.py +0 -177
  16. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/MANIFEST.in +0 -0
  17. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/README.md +0 -0
  18. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/ai/FrameDrawer.py +0 -0
  19. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/ai/ImageDebugger.py +0 -0
  20. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/ai/__init__.py +0 -0
  21. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/callbacks/DetectionCallbackManager.py +0 -0
  22. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/callbacks/DetectionCallbackTypes.py +0 -0
  23. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/callbacks/__init__.py +0 -0
  24. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/cli.py +0 -0
  25. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/config/ConfigurationManager.py +0 -0
  26. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/config/__init__.py +0 -0
  27. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/database/DatabaseManager.py +0 -0
  28. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/database/__init__.py +0 -0
  29. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/detection/BaseDetector.py +0 -0
  30. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/detection/YOLODetector.py +0 -0
  31. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/detection/__init__.py +0 -0
  32. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/detection/detection_processing/DetectionProcessor.py +0 -0
  33. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/detection/detection_processing/HumanDetectionProcessor.py +0 -0
  34. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/detection/detection_processing/PPEDetectionProcessor.py +0 -0
  35. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/detection/detection_processing/__init__.py +0 -0
  36. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/doctor.py +0 -0
  37. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/drawing_assets/blue/inner_corner.png +0 -0
  38. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/drawing_assets/blue/inner_frame.png +0 -0
  39. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/drawing_assets/blue/line.png +0 -0
  40. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/drawing_assets/blue/top_left.png +0 -0
  41. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/drawing_assets/blue/top_right.png +0 -0
  42. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/drawing_assets/red/inner_corner.png +0 -0
  43. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/drawing_assets/red/inner_frame.png +0 -0
  44. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/drawing_assets/red/line.png +0 -0
  45. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/drawing_assets/red/top_left.png +0 -0
  46. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/drawing_assets/red/top_right.png +0 -0
  47. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/icons/boots-green.png +0 -0
  48. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/icons/boots-red.png +0 -0
  49. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/icons/gloves-green.png +0 -0
  50. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/icons/gloves-red.png +0 -0
  51. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/icons/goggles-green.png +0 -0
  52. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/icons/goggles-red.png +0 -0
  53. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/icons/helmet-green.png +0 -0
  54. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/icons/helmet-red.png +0 -0
  55. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/icons/mask-red.png +0 -0
  56. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/icons/vest-green.png +0 -0
  57. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/icons/vest-red.png +0 -0
  58. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/__init__.py +0 -0
  59. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/ai_model.py +0 -0
  60. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/auth.py +0 -0
  61. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/config.py +0 -0
  62. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/dataset_source.py +0 -0
  63. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/logs.py +0 -0
  64. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/ppe_detection.py +0 -0
  65. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/ppe_detection_label.py +0 -0
  66. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/restricted_area_violation.py +0 -0
  67. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/user.py +0 -0
  68. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/worker_source.py +0 -0
  69. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/worker_source_pipeline.py +0 -0
  70. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/worker_source_pipeline_config.py +0 -0
  71. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/worker_source_pipeline_debug.py +0 -0
  72. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/models/worker_source_pipeline_detection.py +0 -0
  73. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/pipeline/PipelineConfigManager.py +0 -0
  74. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/pipeline/PipelinePrepocessor.py +0 -0
  75. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/pipeline/__init__.py +0 -0
  76. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/preprocessing/ImageResizer.py +0 -0
  77. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/preprocessing/ImageRoi.py +0 -0
  78. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/preprocessing/Preprocessor.py +0 -0
  79. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/preprocessing/__init__.py +0 -0
  80. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/repositories/AIModelRepository.py +0 -0
  81. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/repositories/BaseRepository.py +0 -0
  82. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/repositories/PPEDetectionRepository.py +0 -0
  83. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/repositories/RestrictedAreaRepository.py +0 -0
  84. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/repositories/WorkerSourcePipelineDebugRepository.py +0 -0
  85. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/repositories/WorkerSourcePipelineDetectionRepository.py +0 -0
  86. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/repositories/WorkerSourcePipelineRepository.py +0 -0
  87. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/repositories/WorkerSourceRepository.py +0 -0
  88. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/repositories/__init__.py +0 -0
  89. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/services/SharedVideoStreamServer.py +0 -0
  90. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/services/VideoSharingDaemon.py +0 -0
  91. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/services/VideoSharingDaemonManager.py +0 -0
  92. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/streams/RTMPStreamer.py +0 -0
  93. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/streams/SharedVideoDeviceManager.py +0 -0
  94. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/streams/StreamSyncThread.py +0 -0
  95. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/streams/VideoStream.py +0 -0
  96. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/streams/VideoStreamManager.py +0 -0
  97. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/streams/__init__.py +0 -0
  98. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/tracker/SFSORT.py +0 -0
  99. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/tracker/TrackerManager.py +0 -0
  100. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/tracker/__init__.py +0 -0
  101. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/util/BoundingBoxMetrics.py +0 -0
  102. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/util/DrawingUtils.py +0 -0
  103. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/util/ModelReadinessChecker.py +0 -0
  104. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/util/PersonAttributeMatcher.py +0 -0
  105. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/util/PersonRestrictedAreaMatcher.py +0 -0
  106. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/util/PipelinePreviewChecker.py +0 -0
  107. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/util/PlatformDetector.py +0 -0
  108. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/util/TablePrinter.py +0 -0
  109. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/util/__init__.py +0 -0
  110. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core.egg-info/SOURCES.txt +0 -0
  111. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core.egg-info/dependency_links.txt +0 -0
  112. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core.egg-info/entry_points.txt +0 -0
  113. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core.egg-info/top_level.txt +0 -0
  114. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/setup.cfg +0 -0
  115. {nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/setup.py +0 -0
{nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nedo-vision-worker-core
-Version: 0.3.7
+Version: 0.3.9
 Summary: Nedo Vision Worker Core Library for AI Vision Processing
 Author-email: Willy Achmat Fauzi <willy.achmat@gmail.com>
 Maintainer-email: Willy Achmat Fauzi <willy.achmat@gmail.com>
@@ -30,7 +30,7 @@ Classifier: Environment :: No Input/Output (Daemon)
 Requires-Python: >=3.8
 Description-Content-Type: text/markdown
 Requires-Dist: alembic>=1.8.0
-Requires-Dist: numpy>=1.21.0
+Requires-Dist: numpy<2.0.0,>=1.21.0
 Requires-Dist: pillow>=8.0.0
 Requires-Dist: psutil>=5.9.0
 Requires-Dist: scipy>=1.9.0
@@ -40,8 +40,6 @@ Requires-Dist: torch>=1.9.0
 Requires-Dist: torchvision>=0.10.0
 Requires-Dist: ultralytics>=8.0.0
 Requires-Dist: rfdetr<2.0.0,>=1.2.0
-Provides-Extra: opencv
-Requires-Dist: opencv-python>=4.6.0; extra == "opencv"
 Provides-Extra: dev
 Requires-Dist: pytest>=7.0.0; extra == "dev"
 Requires-Dist: black>=22.0.0; extra == "dev"
@@ -49,6 +47,8 @@ Requires-Dist: isort>=5.10.0; extra == "dev"
 Requires-Dist: mypy>=0.950; extra == "dev"
 Requires-Dist: flake8>=4.0.0; extra == "dev"
 Requires-Dist: pre-commit>=2.17.0; extra == "dev"
+Provides-Extra: opencv
+Requires-Dist: opencv-python<5.0.0,>=4.6.0; extra == "opencv"
 
 # Nedo Vision Worker Core
 
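The metadata changes above pin numpy below 2.0 and move opencv-python into a bounded optional extra named "opencv". A minimal sketch of how downstream code might guard the now-optional OpenCV dependency; the flag name and fallback behaviour are illustrative, not part of the package:

try:
    import cv2  # present only when installed with the "opencv" extra (opencv-python>=4.6,<5)
    OPENCV_AVAILABLE = True
except ImportError:
    cv2 = None
    OPENCV_AVAILABLE = False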
{nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/__init__.py

@@ -7,7 +7,7 @@ A library for running AI vision processing and detection in the Nedo Vision plat
 from .core_service import CoreService
 from .callbacks import DetectionType, CallbackTrigger, DetectionData, IntervalMetadata
 
-__version__ = "0.3.7"
+__version__ = "0.3.9"
 __all__ = [
     "CoreService",
     "DetectionType",
nedo_vision_worker_core-0.3.9/nedo_vision_worker_core/ai/VideoDebugger.py (new file)

@@ -0,0 +1,89 @@
+import cv2
+import threading
+import time
+from collections import defaultdict
+import logging
+
+class VideoDebugger:
+    """Real-time visualization of video streams with detections."""
+
+    def __init__(self, enable_visualization=True):
+        self.enable_visualization = enable_visualization
+        self.windows = {}
+        self.lock = threading.Lock()
+        self.fps_tracker = defaultdict(lambda: {"start_time": time.time(), "frame_count": 0})
+        self._cv_lock = threading.Lock()  # Prevent OpenCV segfaults
+
+    def show_frame(self, pipeline_id, worker_source_id, frame):
+        """Display frame with FPS overlay."""
+        if not self.enable_visualization or frame is None:
+            return
+
+        window_name = f"Pipeline {pipeline_id} - {worker_source_id}"
+
+        try:
+            with self.lock:
+                if window_name not in self.fps_tracker:
+                    self.fps_tracker[window_name] = {"start_time": time.time(), "frame_count": 0}
+
+                self.fps_tracker[window_name]["frame_count"] += 1
+                elapsed_time = time.time() - self.fps_tracker[window_name]["start_time"]
+                fps = self.fps_tracker[window_name]["frame_count"] / max(elapsed_time, 1e-5)
+
+            cv2.putText(frame, f"FPS: {fps:.2f}", (10, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 255), 1)
+
+            if window_name not in self.windows:
+                self.windows[window_name] = True
+
+            # Serialize cv2 calls to prevent segfaults
+            with self._cv_lock:
+                try:
+                    cv2.imshow(window_name, frame)
+                    key = cv2.waitKey(1) & 0xFF
+
+                    if key == ord('q'):
+                        self.close_window(window_name)
+                except Exception as e:
+                    logging.error(f"Error displaying frame for {window_name}: {e}")
+
+        except Exception as e:
+            logging.error(f"Error in show_frame for {window_name}: {e}")
+
+    def close_window(self, window_name):
+        """Close specific window."""
+        with self.lock:
+            if window_name in self.windows:
+                with self._cv_lock:
+                    try:
+                        cv2.destroyWindow(window_name)
+                    except Exception as e:
+                        logging.error(f"Error closing window {window_name}: {e}")
+                del self.windows[window_name]
+
+    def is_window_open(self, pipeline_id):
+        """Check if a window is open for a given pipeline."""
+        with self.lock:
+            # Check if any window exists for this pipeline
+            for window_name in self.windows.keys():
+                if f"Pipeline {pipeline_id}" in window_name:
+                    return True
+            return False
+
+    def close_all(self):
+        """Close all windows."""
+        with self.lock:
+            window_list = list(self.windows.keys())
+
+        with self._cv_lock:
+            try:
+                for window in window_list:
+                    try:
+                        cv2.destroyWindow(window)
+                    except Exception as e:
+                        logging.debug(f"Error destroying window {window}: {e}")
+                cv2.waitKey(1)
+            except Exception as e:
+                logging.error(f"Error in close_all: {e}")
+
+        with self.lock:
+            self.windows.clear()
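A minimal usage sketch for the new VideoDebugger. The import path follows the file location above; the synthetic frame and the pipeline/source IDs are illustrative, and cv2.imshow needs a display, so this assumes a non-headless environment:

import numpy as np
from nedo_vision_worker_core.ai.VideoDebugger import VideoDebugger

debugger = VideoDebugger(enable_visualization=True)
frame = np.zeros((480, 640, 3), dtype=np.uint8)  # stand-in for a decoded video frame

debugger.show_frame("pipeline-1", "camera-1", frame)   # opens window "Pipeline pipeline-1 - camera-1"
print(debugger.is_window_open("pipeline-1"))           # True while that window is tracked
debugger.close_all()                                   # destroys every tracked window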
{nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/core_service.py

@@ -32,7 +32,8 @@ class CoreService:
                  log_level: str = "INFO",
                  storage_path: str = "data",
                  rtmp_server: str = "rtmp://live.vision.sindika.co.id:1935/live",
-                 enable_video_sharing_daemon: bool = True):
+                 enable_video_sharing_daemon: bool = True,
+                 max_pipeline_workers: int = None):
         """
         Initialize the Core Service.
 
@@ -42,12 +43,14 @@ class CoreService:
             storage_path: Storage path for databases and files (default: data)
             rtmp_server: RTMP server URL for video streaming (default: rtmp://localhost:1935/live)
             enable_video_sharing_daemon: Enable automatic video sharing daemon management (default: True)
+            max_pipeline_workers: Maximum concurrent pipeline workers (default: auto-detect based on CPU cores)
         """
         self.running = True
         self.video_manager = None
         self.stream_sync_thread = None
         self.pipeline_sync_thread = None
         self.enable_video_sharing_daemon = enable_video_sharing_daemon
+        self.max_pipeline_workers = max_pipeline_workers
 
         # Initialize callback manager if not already done
         if CoreService._callback_manager is None:
@@ -230,7 +233,10 @@ class CoreService:
         self.stream_sync_thread.start()
 
         # Start pipeline synchronization thread (AI processing)
-        self.pipeline_sync_thread = PipelineSyncThread(self.video_manager)
+        self.pipeline_sync_thread = PipelineSyncThread(
+            self.video_manager,
+            max_workers=self.max_pipeline_workers
+        )
         self.pipeline_sync_thread.start()
 
         logging.info("✅ Nedo Vision Core initialized and running.")
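A minimal construction sketch showing the new max_pipeline_workers argument. The keyword names and defaults come from the diff above; the values chosen here are illustrative, and omitting the argument keeps the auto-detected pool size:

from nedo_vision_worker_core import CoreService

# Cap concurrent AI pipeline workers at 4; leave max_pipeline_workers=None
# to let the pipeline sync thread size the pool from the CPU core count.
service = CoreService(
    log_level="INFO",
    storage_path="data",
    max_pipeline_workers=4,
)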
{nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/detection/RFDETRDetector.py

@@ -1,11 +1,11 @@
 import cv2
 import logging
 try:
-    from rfdetr import RFDETRBase
+    from rfdetr import RFDETRNano, RFDETRSmall, RFDETRMedium, RFDETRBase, RFDETRLarge
     RFDETR_AVAILABLE = True
 except ImportError:
     RFDETR_AVAILABLE = False
-    RFDETRBase = None
+    RFDETRNano = RFDETRSmall = RFDETRMedium = RFDETRBase = RFDETRLarge = None
 
 from ..database.DatabaseManager import DatabaseManager
 from ..models.ai_model import AIModelEntity
@@ -32,6 +32,32 @@ class RFDETRDetector(BaseDetector):
         if model:
             self.load_model(model)
 
+    def _detect_model_variant(self, model_path: str):
+        """
+        Automatically detect the correct RF-DETR variant by trying to load the weights.
+        Returns the appropriate RF-DETR class or None if all attempts fail.
+        """
+        variants = [
+            ("Nano", RFDETRNano),
+            ("Small", RFDETRSmall),
+            ("Medium", RFDETRMedium),
+            ("Base", RFDETRBase),
+            ("Large", RFDETRLarge)
+        ]
+
+        for variant_name, variant_class in variants:
+            try:
+                logging.info(f"🔍 Trying RF-DETR {variant_name} variant...")
+                temp_model = variant_class(pretrain_weights=model_path)
+                logging.info(f"✅ Successfully loaded RF-DETR {variant_name} variant")
+                return temp_model, variant_name
+            except Exception as e:
+                # Only log at debug level to avoid cluttering logs
+                logging.debug(f"RF-DETR {variant_name} variant failed: {e}")
+                continue
+
+        return None, None
+
     def load_model(self, model: AIModelEntity):
         if not isinstance(model, AIModelEntity):
             raise TypeError("model must be an instance of AIModelEntity")
@@ -44,8 +70,16 @@ class RFDETRDetector(BaseDetector):
             return False
 
         try:
-            self.model = RFDETRBase(pretrain_weights=path.as_posix())
+            loaded_model, variant_name = self._detect_model_variant(path.as_posix())
+
+            if loaded_model is None:
+                logging.error(f"❌ Could not load model with any RF-DETR variant")
+                self.model = None
+                return False
+
+            self.model = loaded_model
             self.model.optimize_for_inference()
+            logging.info(f"✅ Loaded {model.name} using RF-DETR {variant_name}")
             return True
         except Exception as e:
             logging.error(f"❌ Error loading RFDETR model {model.name}: {e}")
{nedo_vision_worker_core-0.3.7 → nedo_vision_worker_core-0.3.9}/nedo_vision_worker_core/pipeline/ModelManager.py

@@ -122,16 +122,51 @@ class ModelManager:
             return False
 
     def sync_cache(self, active_model_ids: Set[str]):
-        """
-        Removes detectors from the cache if their corresponding models are no longer in the database
-        or are not being used by any active pipeline.
-        """
+        """Remove unused detectors from cache."""
         cached_ids = set(self._detector_cache.keys())
         stale_ids = cached_ids - active_model_ids
 
         for model_id in stale_ids:
-            del self._detector_cache[model_id]
+            detector = self._detector_cache.pop(model_id, None)
+            if detector:
+                self._cleanup_detector(detector)
             logging.info(f"🧹 Removed unused detector for model {model_id} from cache.")
+
+    def _cleanup_detector(self, detector: BaseDetector):
+        """Free detector resources and GPU memory."""
+        try:
+            if hasattr(detector, 'model') and detector.model is not None:
+                # Move model to CPU if possible
+                if hasattr(detector.model, 'cpu'):
+                    try:
+                        detector.model.cpu()
+                    except Exception as e:
+                        logging.debug(f"Error moving model to CPU: {e}")
+
+                if hasattr(detector.model, 'eval'):
+                    try:
+                        detector.model.eval()
+                    except Exception:
+                        pass
+
+                detector.model = None
+
+            detector.metadata = None
+
+            # Force garbage collection and clear GPU cache
+            import gc
+            gc.collect()
+
+            try:
+                import torch
+                if torch.cuda.is_available():
+                    torch.cuda.empty_cache()
+                    logging.debug("🧹 GPU cache cleared")
+            except ImportError:
+                pass
+
+        except Exception as e:
+            logging.error(f"Error cleaning up detector: {e}")
 
     def clear_cache(self):
         """Clears the detector cache."""
nedo_vision_worker_core-0.3.9/nedo_vision_worker_core/pipeline/PipelineManager.py (new file)

@@ -0,0 +1,184 @@
+import logging
+import time
+import threading
+from typing import Dict
+from concurrent.futures import ThreadPoolExecutor
+from .PipelineProcessor import PipelineProcessor
+from ..streams.VideoStreamManager import VideoStreamManager
+
+class PipelineManager:
+    """Manages AI pipeline execution with thread pooling for scalability."""
+
+    def __init__(self, video_manager: VideoStreamManager, on_pipeline_stopped, max_workers=None):
+        # Auto-detect optimal worker count if not specified
+        if max_workers is None:
+            import os
+            cpu_count = os.cpu_count() or 4
+            # Reserve 2 cores for system/video streams, use rest for pipelines
+            max_workers = max(4, cpu_count - 2)
+
+        self.max_workers = max_workers
+        self.executor = ThreadPoolExecutor(
+            max_workers=max_workers,
+            thread_name_prefix="pipeline-worker"
+        )
+        self.pipeline_futures = {}  # {pipeline_id: Future}
+        self.pipeline_metadata = {}  # {pipeline_id: metadata}
+        self.video_manager = video_manager
+        self.processors: Dict[str, PipelineProcessor] = {}  # {pipeline_id: PipelineProcessor}
+        self.running = True
+        self._stopping_pipelines = set()
+        self._stop_lock = threading.Lock()
+        self.on_pipeline_stopped = on_pipeline_stopped
+
+        logging.info(f"🚀 PipelineManager initialized with {max_workers} worker threads")
+
+    def start_pipeline(self, pipeline, detector):
+        """Start a pipeline processing."""
+        pipeline_id = pipeline.id
+        worker_source_id = pipeline.worker_source_id
+
+        if not self.running:
+            logging.warning(f"⚠️ Attempt to start pipeline {pipeline_id} after shutdown.")
+            return
+
+        if self.is_running(pipeline_id):
+            logging.warning(f"⚠️ Pipeline {pipeline_id} is already running.")
+            return
+
+        logging.info(f"🚀 Starting Pipeline processing for pipeline: {pipeline_id} | Source: {worker_source_id} ({pipeline.name})")
+
+        # Acquire video stream
+        if not self.video_manager.acquire_stream(worker_source_id, pipeline_id):
+            logging.error(f"❌ Failed to acquire stream {worker_source_id} for pipeline {pipeline_id}")
+            return
+
+        processor = PipelineProcessor(pipeline, detector, False)
+        processor.frame_drawer.location_name = pipeline.location_name
+        self.processors[pipeline_id] = processor
+
+        active_count = len([f for f in self.pipeline_futures.values() if not f.done()])
+        logging.info(f"📋 Submitting pipeline {pipeline_id} to thread pool (active: {active_count}/{self.max_workers})")
+
+        try:
+            # Submit to thread pool instead of creating dedicated thread
+            future = self.executor.submit(
+                self._pipeline_worker,
+                pipeline_id,
+                processor
+            )
+
+            # Add completion callback
+            future.add_done_callback(lambda f: self._handle_pipeline_completion(pipeline_id, f))
+
+            self.pipeline_futures[pipeline_id] = future
+            self.pipeline_metadata[pipeline_id] = pipeline
+
+            logging.info(f"✅ Pipeline {pipeline_id} submitted to thread pool")
+
+        except Exception as e:
+            logging.error(f"❌ Failed to submit pipeline {pipeline_id} to thread pool: {e}", exc_info=True)
+            self.processors.pop(pipeline_id, None)
+            self.video_manager.release_stream(worker_source_id, pipeline_id)
+            raise
+
+    def _pipeline_worker(self, pipeline_id: str, processor: PipelineProcessor):
+        """Worker function executed in thread pool."""
+        try:
+            logging.info(f"🏁 Pipeline {pipeline_id} worker starting...")
+            processor.process_pipeline(self.video_manager)
+        except Exception as e:
+            logging.error(f"❌ Unhandled error in pipeline {pipeline_id} worker: {e}", exc_info=True)
+        finally:
+            logging.info(f"🏁 Pipeline {pipeline_id} worker finished")
+
+    def _handle_pipeline_completion(self, pipeline_id: str, future=None):
+        """Handle cleanup when pipeline finishes."""
+        with self._stop_lock:
+            if pipeline_id in self._stopping_pipelines:
+                return
+
+        try:
+            logging.info(f"🏁 Pipeline {pipeline_id} completed execution")
+
+            # Log any exception from the future
+            if future and not future.cancelled():
+                try:
+                    future.result(timeout=0)
+                except Exception as e:
+                    logging.error(f"Pipeline {pipeline_id} ended with exception: {e}")
+        except Exception as e:
+            logging.error(f"⚠️ Error in handling pipeline {pipeline_id} completion: {e}")
+        finally:
+            self.on_pipeline_stopped(pipeline_id)
+
+    def stop_pipeline(self, pipeline_id: str):
+        """Stop an AI processing pipeline."""
+        with self._stop_lock:
+            if pipeline_id in self._stopping_pipelines:
+                logging.debug(f"Pipeline {pipeline_id} already being stopped, skipping")
+                return
+            self._stopping_pipelines.add(pipeline_id)
+
+        try:
+            pipeline = self.pipeline_metadata.get(pipeline_id)
+            worker_source_id = pipeline.worker_source_id if pipeline else None
+
+            # Stop processor first to signal threads
+            processor = self.processors.pop(pipeline_id, None)
+            if processor:
+                processor.stop()
+
+            # Cancel future if still pending/running
+            future = self.pipeline_futures.pop(pipeline_id, None)
+            if future and not future.done():
+                logging.debug(f"Cancelling future for pipeline {pipeline_id}")
+                future.cancel()
+
+                # Wait briefly for graceful shutdown
+                try:
+                    future.result(timeout=1.0)
+                except Exception as e:
+                    logging.debug(f"Pipeline {pipeline_id} future ended: {e}")
+
+            self.pipeline_metadata.pop(pipeline_id, None)
+
+            # Release video stream
+            if worker_source_id:
+                self.video_manager.release_stream(worker_source_id, pipeline_id)
+
+            logging.info(f"✅ Pipeline {pipeline_id} stopped successfully.")
+
+        except Exception as e:
+            logging.error(f"❌ Error during pipeline shutdown: {e}")
+
+        finally:
+            self._stopping_pipelines.discard(pipeline_id)
+            self.on_pipeline_stopped(pipeline_id)
+
+    def get_active_pipelines(self):
+        """Returns a list of active pipeline IDs."""
+        return list(self.pipeline_metadata.keys())
+
+    def get_pipeline(self, pipeline_id):
+        """Returns the pipeline metadata."""
+        return self.pipeline_metadata.get(pipeline_id, None)
+
+    def is_running(self, pipeline_id):
+        """Check if pipeline is currently running."""
+        future = self.pipeline_futures.get(pipeline_id)
+        return future is not None and not future.done()
+
+    def shutdown(self):
+        """Shuts down the pipeline manager gracefully."""
+        logging.info("🛑 Shutting down PipelineManager...")
+        self.running = False
+
+        # Stop all pipelines
+        for pipeline_id in list(self.pipeline_futures.keys()):
+            self.stop_pipeline(pipeline_id)
+
+        # Shutdown thread pool
+        logging.info("🛑 Shutting down thread pool executor...")
+        self.executor.shutdown(wait=True)
+        logging.info("✅ PipelineManager stopped.")
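The new PipelineManager sizes its thread pool from the host CPU when max_workers is not given. A small worked sketch of that sizing rule, taken from the constructor above:

import os

# Reserve two cores for the system and video streams, but never drop below four workers.
cpu_count = os.cpu_count() or 4
max_workers = max(4, cpu_count - 2)
print(max_workers)  # e.g. 6 on an 8-core host, 4 on a dual-core host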