nedo-vision-worker-core 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of nedo-vision-worker-core might be problematic. Click here for more details.

Files changed (95)
  1. nedo_vision_worker_core/__init__.py +23 -0
  2. nedo_vision_worker_core/ai/FrameDrawer.py +144 -0
  3. nedo_vision_worker_core/ai/ImageDebugger.py +126 -0
  4. nedo_vision_worker_core/ai/VideoDebugger.py +69 -0
  5. nedo_vision_worker_core/ai/__init__.py +1 -0
  6. nedo_vision_worker_core/cli.py +197 -0
  7. nedo_vision_worker_core/config/ConfigurationManager.py +173 -0
  8. nedo_vision_worker_core/config/__init__.py +1 -0
  9. nedo_vision_worker_core/core_service.py +237 -0
  10. nedo_vision_worker_core/database/DatabaseManager.py +236 -0
  11. nedo_vision_worker_core/database/__init__.py +1 -0
  12. nedo_vision_worker_core/detection/BaseDetector.py +22 -0
  13. nedo_vision_worker_core/detection/DetectionManager.py +83 -0
  14. nedo_vision_worker_core/detection/RFDETRDetector.py +62 -0
  15. nedo_vision_worker_core/detection/YOLODetector.py +57 -0
  16. nedo_vision_worker_core/detection/__init__.py +1 -0
  17. nedo_vision_worker_core/detection/detection_processing/DetectionProcessor.py +29 -0
  18. nedo_vision_worker_core/detection/detection_processing/HumanDetectionProcessor.py +47 -0
  19. nedo_vision_worker_core/detection/detection_processing/PPEDetectionProcessor.py +44 -0
  20. nedo_vision_worker_core/detection/detection_processing/__init__.py +1 -0
  21. nedo_vision_worker_core/doctor.py +342 -0
  22. nedo_vision_worker_core/drawing_assets/blue/inner_corner.png +0 -0
  23. nedo_vision_worker_core/drawing_assets/blue/inner_frame.png +0 -0
  24. nedo_vision_worker_core/drawing_assets/blue/line.png +0 -0
  25. nedo_vision_worker_core/drawing_assets/blue/top_left.png +0 -0
  26. nedo_vision_worker_core/drawing_assets/blue/top_right.png +0 -0
  27. nedo_vision_worker_core/drawing_assets/red/inner_corner.png +0 -0
  28. nedo_vision_worker_core/drawing_assets/red/inner_frame.png +0 -0
  29. nedo_vision_worker_core/drawing_assets/red/line.png +0 -0
  30. nedo_vision_worker_core/drawing_assets/red/top_left.png +0 -0
  31. nedo_vision_worker_core/drawing_assets/red/top_right.png +0 -0
  32. nedo_vision_worker_core/icons/boots-green.png +0 -0
  33. nedo_vision_worker_core/icons/boots-red.png +0 -0
  34. nedo_vision_worker_core/icons/gloves-green.png +0 -0
  35. nedo_vision_worker_core/icons/gloves-red.png +0 -0
  36. nedo_vision_worker_core/icons/goggles-green.png +0 -0
  37. nedo_vision_worker_core/icons/goggles-red.png +0 -0
  38. nedo_vision_worker_core/icons/helmet-green.png +0 -0
  39. nedo_vision_worker_core/icons/helmet-red.png +0 -0
  40. nedo_vision_worker_core/icons/mask-red.png +0 -0
  41. nedo_vision_worker_core/icons/vest-green.png +0 -0
  42. nedo_vision_worker_core/icons/vest-red.png +0 -0
  43. nedo_vision_worker_core/models/__init__.py +20 -0
  44. nedo_vision_worker_core/models/ai_model.py +41 -0
  45. nedo_vision_worker_core/models/auth.py +14 -0
  46. nedo_vision_worker_core/models/config.py +9 -0
  47. nedo_vision_worker_core/models/dataset_source.py +30 -0
  48. nedo_vision_worker_core/models/logs.py +9 -0
  49. nedo_vision_worker_core/models/ppe_detection.py +39 -0
  50. nedo_vision_worker_core/models/ppe_detection_label.py +20 -0
  51. nedo_vision_worker_core/models/restricted_area_violation.py +20 -0
  52. nedo_vision_worker_core/models/user.py +10 -0
  53. nedo_vision_worker_core/models/worker_source.py +19 -0
  54. nedo_vision_worker_core/models/worker_source_pipeline.py +21 -0
  55. nedo_vision_worker_core/models/worker_source_pipeline_config.py +24 -0
  56. nedo_vision_worker_core/models/worker_source_pipeline_debug.py +15 -0
  57. nedo_vision_worker_core/models/worker_source_pipeline_detection.py +14 -0
  58. nedo_vision_worker_core/pipeline/PipelineConfigManager.py +32 -0
  59. nedo_vision_worker_core/pipeline/PipelineManager.py +133 -0
  60. nedo_vision_worker_core/pipeline/PipelinePrepocessor.py +40 -0
  61. nedo_vision_worker_core/pipeline/PipelineProcessor.py +338 -0
  62. nedo_vision_worker_core/pipeline/PipelineSyncThread.py +202 -0
  63. nedo_vision_worker_core/pipeline/__init__.py +1 -0
  64. nedo_vision_worker_core/preprocessing/ImageResizer.py +42 -0
  65. nedo_vision_worker_core/preprocessing/ImageRoi.py +61 -0
  66. nedo_vision_worker_core/preprocessing/Preprocessor.py +16 -0
  67. nedo_vision_worker_core/preprocessing/__init__.py +1 -0
  68. nedo_vision_worker_core/repositories/AIModelRepository.py +31 -0
  69. nedo_vision_worker_core/repositories/PPEDetectionRepository.py +146 -0
  70. nedo_vision_worker_core/repositories/RestrictedAreaRepository.py +90 -0
  71. nedo_vision_worker_core/repositories/WorkerSourcePipelineDebugRepository.py +81 -0
  72. nedo_vision_worker_core/repositories/WorkerSourcePipelineDetectionRepository.py +71 -0
  73. nedo_vision_worker_core/repositories/WorkerSourcePipelineRepository.py +79 -0
  74. nedo_vision_worker_core/repositories/WorkerSourceRepository.py +19 -0
  75. nedo_vision_worker_core/repositories/__init__.py +1 -0
  76. nedo_vision_worker_core/streams/RTMPStreamer.py +146 -0
  77. nedo_vision_worker_core/streams/StreamSyncThread.py +66 -0
  78. nedo_vision_worker_core/streams/VideoStream.py +324 -0
  79. nedo_vision_worker_core/streams/VideoStreamManager.py +121 -0
  80. nedo_vision_worker_core/streams/__init__.py +1 -0
  81. nedo_vision_worker_core/tracker/SFSORT.py +325 -0
  82. nedo_vision_worker_core/tracker/TrackerManager.py +163 -0
  83. nedo_vision_worker_core/tracker/__init__.py +1 -0
  84. nedo_vision_worker_core/util/BoundingBoxMetrics.py +53 -0
  85. nedo_vision_worker_core/util/DrawingUtils.py +354 -0
  86. nedo_vision_worker_core/util/ModelReadinessChecker.py +188 -0
  87. nedo_vision_worker_core/util/PersonAttributeMatcher.py +70 -0
  88. nedo_vision_worker_core/util/PersonRestrictedAreaMatcher.py +45 -0
  89. nedo_vision_worker_core/util/TablePrinter.py +28 -0
  90. nedo_vision_worker_core/util/__init__.py +1 -0
  91. nedo_vision_worker_core-0.2.0.dist-info/METADATA +347 -0
  92. nedo_vision_worker_core-0.2.0.dist-info/RECORD +95 -0
  93. nedo_vision_worker_core-0.2.0.dist-info/WHEEL +5 -0
  94. nedo_vision_worker_core-0.2.0.dist-info/entry_points.txt +2 -0
  95. nedo_vision_worker_core-0.2.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,20 @@
1
+ # Import all models to ensure they are registered with SQLAlchemy Base registry
2
+ # This prevents the auto-migration system from thinking these models don't exist
3
+
4
+ from .ai_model import AIModelEntity
5
+ from .auth import AuthEntity
6
+ from .config import ConfigEntity
7
+ from .dataset_source import DatasetSourceEntity
8
+ from .logs import LogEntity
9
+ from .ppe_detection import PPEDetectionEntity
10
+ from .ppe_detection_label import PPEDetectionLabelEntity
11
+ from .restricted_area_violation import RestrictedAreaViolationEntity
12
+ from .user import UserEntity
13
+ from .worker_source import WorkerSourceEntity
14
+ from .worker_source_pipeline import WorkerSourcePipelineEntity
15
+ from .worker_source_pipeline_config import WorkerSourcePipelineConfigEntity
16
+ from .worker_source_pipeline_debug import WorkerSourcePipelineDebugEntity
17
+ from .worker_source_pipeline_detection import WorkerSourcePipelineDetectionEntity
18
+
19
+ # This ensures all models are imported and registered with SQLAlchemy's Base registry
20
+ # when the models package is imported during database initialization
@@ -0,0 +1,41 @@
1
+ import uuid
2
+ from sqlalchemy import Column, String, DateTime
3
+ from datetime import datetime
4
+ from ..database.DatabaseManager import Base
5
+
6
class AIModelEntity(Base):
    """ORM row describing an AI model file and its download lifecycle.

    ``download_status`` moves through pending -> downloading -> completed,
    or ends in failed; the helper predicates below wrap those states.
    """

    __tablename__ = "ai_model"
    __bind_key__ = "default"

    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    file = Column(String, nullable=False)
    type = Column(String, nullable=False)
    name = Column(String, nullable=False)
    version = Column(String, nullable=False)
    download_status = Column(String, nullable=True, default="completed") # pending, downloading, completed, failed
    last_download_attempt = Column(DateTime, nullable=True)
    download_error = Column(String, nullable=True)

    def __repr__(self):
        """Unambiguous debug representation (no download status)."""
        return f"<AIModelEntity(id={self.id}, name={self.name}, type={self.type}, file={self.file}, version={self.version})>"

    def __str__(self):
        """Human-readable summary, including the download status."""
        return f"AIModelEntity(id={self.id}, name={self.name}, type={self.type}, file={self.file}, version={self.version}, status={self.download_status})"

    def is_ready_for_use(self) -> bool:
        """Check if the model is ready for use (downloaded and available)."""
        return self.download_status == "completed"

    def is_downloading(self) -> bool:
        """Check if the model is currently being downloaded."""
        return self.download_status in ("pending", "downloading")

    def has_download_failed(self) -> bool:
        """Check if the model download has failed."""
        return self.download_status == "failed"
@@ -0,0 +1,14 @@
1
+ from sqlalchemy import Column, String, Integer
2
+ from ..database.DatabaseManager import Base
3
+
4
class AuthEntity(Base):
    """Stored login credentials for this worker installation."""

    __tablename__ = "auth"

    id = Column(Integer, primary_key=True)
    username = Column(String, nullable=False)
    password = Column(String, nullable=False)

    def __repr__(self):
        """Debug representation; deliberately omits the password."""
        return f"<AuthEntity(id={self.id}, username={self.username})>"

    def to_dict(self):
        """Serialize to a plain dict, excluding the password field."""
        return dict(id=self.id, username=self.username)
@@ -0,0 +1,9 @@
1
+ from sqlalchemy import Column, String
2
+ from ..database.DatabaseManager import Base
3
+
4
class ConfigEntity(Base):
    """Key/value row for server-side configuration.

    Stored in the "server_config" table of the "config" bind
    (``__bind_key__`` is presumably resolved by the project's
    DatabaseManager when routing engines — confirm there).
    """
    __tablename__ = "server_config"
    __bind_key__ = "config"

    # Configuration key; the natural primary key of the table.
    key = Column(String, primary_key=True)
    # Value persisted as text regardless of its logical type.
    value = Column(String, nullable=False)
@@ -0,0 +1,30 @@
1
+ from sqlalchemy import Column, String, Integer
2
+ from ..database.DatabaseManager import Base
3
+
4
class DatasetSourceEntity(Base):
    """Link between a dataset and a worker video source, with sampling settings."""

    __tablename__ = "dataset_sources"
    __bind_key__ = "default"

    id = Column(String, primary_key=True)
    dataset_id = Column(String, nullable=False)
    worker_source_id = Column(String, nullable=False)
    sampling_interval = Column(Integer, nullable=False)
    dataset_name = Column(String, nullable=False)
    worker_source_name = Column(String, nullable=False)
    worker_source_url = Column(String, nullable=False)

    def _fields(self) -> str:
        # Field listing shared by __repr__ and __str__ so both stay in sync.
        return (
            f"id={self.id}, dataset_id={self.dataset_id}, "
            f"worker_source_id={self.worker_source_id}, sampling_interval={self.sampling_interval}, "
            f"dataset_name={self.dataset_name}, worker_source_name={self.worker_source_name}, "
            f"worker_source_url={self.worker_source_url}"
        )

    def __repr__(self):
        """Unambiguous debug representation."""
        return f"<DatasetSourceEntity({self._fields()})>"

    def __str__(self):
        """Human-readable form (same fields, no angle brackets)."""
        return f"DatasetSourceEntity({self._fields()})"
@@ -0,0 +1,9 @@
1
+ from sqlalchemy import Column, String, Integer
2
+ from ..database.DatabaseManager import Base
3
+
4
class LogEntity(Base):
    """A single application log line persisted to the "logging" bind."""
    __tablename__ = "logs"
    __bind_key__ = "logging"

    # Auto-incrementing surrogate key.
    id = Column(Integer, primary_key=True, autoincrement=True)
    # The log message text; no level/timestamp columns exist on this table.
    message = Column(String, nullable=False)
@@ -0,0 +1,39 @@
1
+ import uuid
2
+ import datetime
3
+ from sqlalchemy import Column, String, ForeignKey, DateTime, Float, Integer
4
+ from sqlalchemy.orm import relationship
5
+ from ..database.DatabaseManager import Base
6
+
7
class PPEDetectionEntity(Base):
    """A per-person PPE detection event with evidence images and bounding box."""

    __tablename__ = "ppe_detections"
    __bind_key__ = "default"

    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    worker_id = Column(String, nullable=False)
    worker_source_id = Column(String, nullable=False)
    person_id = Column(String, nullable=False)
    image_path = Column(String, nullable=False)
    image_tile_path = Column(String, nullable=False)
    detection_count = Column(Integer, nullable=False, default=0) # Tracks total detections before saving
    created_at = Column(DateTime, default=datetime.datetime.utcnow)
    b_box_x1 = Column(Float, nullable=False)
    b_box_y1 = Column(Float, nullable=False)
    b_box_x2 = Column(Float, nullable=False)
    b_box_y2 = Column(Float, nullable=False)

    # Child PPE item labels (helmet, vest, ...), one row per detected item.
    ppe_labels = relationship("PPEDetectionLabelEntity", back_populates="detection")

    def __repr__(self):
        """Debug representation; includes the evidence image path."""
        return (
            f"<PPEDetectionEntity(id={self.id}, worker_id={self.worker_id}, "
            f"worker_source_id={self.worker_source_id}, person_id={self.person_id}, image_path={self.image_path}, "
            f"detection_count={self.detection_count}, created_at={self.created_at})>"
        )

    def __str__(self):
        """Short human-readable form, without the image path."""
        return (
            f"PPEDetectionEntity(id={self.id}, worker_id={self.worker_id}, "
            f"worker_source_id={self.worker_source_id}, person_id={self.person_id}, "
            f"detection_count={self.detection_count}, created_at={self.created_at})"
        )
@@ -0,0 +1,20 @@
1
+ import uuid
2
+ from sqlalchemy import Column, String, ForeignKey, DateTime, Float, Integer
3
+ from sqlalchemy.orm import relationship
4
+ from ..database.DatabaseManager import Base
5
+
6
class PPEDetectionLabelEntity(Base):
    """One PPE item (helmet, vest, ...) detected within a parent PPE detection."""
    __tablename__ = "ppe_detection_labels"
    __bind_key__ = "default"

    # Client-side generated surrogate key.
    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    # Parent row in ppe_detections.
    detection_id = Column(String, ForeignKey("ppe_detections.id"), nullable=False)
    code = Column(String, nullable=False) # helmet, vest, etc.
    confidence_score = Column(Float, nullable=False)
    # Running tally of frames that produced this label — TODO confirm against the writer.
    detection_count = Column(Integer, nullable=False, default=0)
    # Bounding box corners; coordinate space (pixels vs normalized) not visible here — confirm.
    b_box_x1 = Column(Float, nullable=False)
    b_box_y1 = Column(Float, nullable=False)
    b_box_x2 = Column(Float, nullable=False)
    b_box_y2 = Column(Float, nullable=False)

    # Bidirectional link to PPEDetectionEntity.ppe_labels.
    detection = relationship("PPEDetectionEntity", back_populates="ppe_labels")
@@ -0,0 +1,20 @@
1
+ import uuid
2
+ import datetime
3
+ from sqlalchemy import Column, Float, String, DateTime
4
+ from ..database.DatabaseManager import Base
5
+
6
class RestrictedAreaViolationEntity(Base):
    """A person detected inside a restricted area, with evidence images."""
    __tablename__ = "restricted_area_violation"
    __bind_key__ = "default"

    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    worker_source_id = Column(String, nullable=False)
    # Tracker-assigned person identifier.
    person_id = Column(String, nullable=False)
    # Full-frame snapshot and cropped tile of the violating person.
    image_path = Column(String, nullable=False)
    image_tile_path = Column(String, nullable=False)
    confidence_score = Column(Float, nullable=False)
    # NOTE(review): datetime.utcnow stores naive timestamps and is deprecated
    # since Python 3.12 — consider datetime.now(timezone.utc).
    created_at = Column(DateTime, default=datetime.datetime.utcnow)
    # Bounding box corners of the person detection.
    b_box_x1 = Column(Float, nullable=False)
    b_box_y1 = Column(Float, nullable=False)
    b_box_x2 = Column(Float, nullable=False)
    b_box_y2 = Column(Float, nullable=False)
@@ -0,0 +1,10 @@
1
+ from sqlalchemy import Column, String
2
+ from ..database.DatabaseManager import Base
3
+
4
class UserEntity(Base):
    """Account record stored in the "auth" bind."""
    __tablename__ = "user"
    __bind_key__ = "auth"

    id = Column(String, primary_key=True)
    username = Column(String, nullable=False)
    # NOTE(review): whether this holds a hash or plaintext is decided by the
    # writer, not visible here — verify before relying on it.
    password = Column(String, nullable=False)
@@ -0,0 +1,19 @@
1
+ from sqlalchemy import Column, String, Float
2
+ from ..database.DatabaseManager import Base
3
+
4
class WorkerSourceEntity(Base):
    """A video source (camera stream or file) registered to a worker."""
    __tablename__ = "worker_source"
    __bind_key__ = "config"

    id = Column(String, primary_key=True)
    name = Column(String, nullable=False)
    worker_id = Column(String, nullable=False)
    # Source kind discriminator; exact codes are defined elsewhere — confirm.
    type_code = Column(String, nullable=False)
    # Local file path; presumably only set for file-backed sources.
    file_path = Column(String, nullable=True)
    url = Column(String, nullable=False)
    resolution = Column(String, nullable=True)
    status_code = Column(String, nullable=True)
    frame_rate = Column(Float, nullable=True)
    source_location_code = Column(String, nullable=True) # Optional field
    latitude = Column(Float, nullable=True) # Optional field
    longitude = Column(Float, nullable=True) # Optional field
@@ -0,0 +1,21 @@
1
+ from sqlalchemy import Column, String
2
+ from sqlalchemy.orm import relationship
3
+ from ..database.DatabaseManager import Base
4
+
5
class WorkerSourcePipelineEntity(Base):
    """An AI processing pipeline attached to a worker video source."""
    __tablename__ = "worker_source_pipeline"
    __bind_key__ = "config"

    id = Column(String, primary_key=True)
    name = Column(String, nullable=False)
    worker_source_id = Column(String, nullable=False)
    worker_id = Column(String, nullable=False)
    # Nullable so a pipeline can exist before a model is assigned.
    ai_model_id = Column(String, nullable=True)
    pipeline_status_code = Column(String, nullable=False)
    location_name = Column(String, nullable=True)

    # Child config rows; delete-orphan cascade removes configs with their pipeline.
    worker_source_pipeline_configs = relationship(
        "WorkerSourcePipelineConfigEntity",
        back_populates="pipeline",
        cascade="all, delete-orphan"
    )
@@ -0,0 +1,24 @@
1
+ from sqlalchemy import Column, String, ForeignKey, Boolean
2
+ from sqlalchemy.orm import relationship
3
+ from ..database.DatabaseManager import Base
4
+
5
+
6
class WorkerSourcePipelineConfigEntity(Base):
    """A single feature/config entry belonging to a pipeline."""
    __tablename__ = "worker_source_pipeline_config"
    __bind_key__ = "config"

    id = Column(String, primary_key=True)
    # FK with DB-side ON DELETE CASCADE; pairs with passive_deletes on the
    # relationship below so the ORM leaves deletion to the database.
    worker_source_pipeline_id = Column(
        String, ForeignKey("worker_source_pipeline.id", ondelete="CASCADE"), nullable=False
    )
    pipeline_config_id = Column(String, nullable=False)
    is_enabled = Column(Boolean, nullable=False)
    # Serialized config payload; interpretation is up to the consumer.
    value = Column(String, nullable=True)
    # Denormalized copies of the config's display name and code.
    pipeline_config_name = Column(String, nullable=False)
    pipeline_config_code = Column(String, nullable=False)

    pipeline = relationship(
        "WorkerSourcePipelineEntity",
        back_populates="worker_source_pipeline_configs",
        passive_deletes=True
    )
@@ -0,0 +1,15 @@
1
+ from datetime import datetime
2
+ import uuid
3
+ from sqlalchemy import Column, DateTime, String
4
+ from ..database.DatabaseManager import Base
5
+
6
class WorkerSourcePipelineDebugEntity(Base):
    """Debug snapshot captured for a pipeline run (image plus serialized data)."""
    __tablename__ = "worker_source_pipeline_debug"
    __bind_key__ = "default"

    # Client-side generated surrogate key.
    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    # Correlation id grouping related debug rows — presumably supplied by the
    # producer; confirm against WorkerSourcePipelineDebugRepository.
    uuid = Column(String, nullable=False)
    worker_source_pipeline_id = Column(String, nullable=False)
    image_path = Column(String, nullable=True)
    # Serialized payload (format not visible here — confirm with the writer).
    data = Column(String, nullable=True)
    # NOTE(review): datetime.utcnow stores naive timestamps and is deprecated
    # since Python 3.12 — consider datetime.now(timezone.utc).
    created_at = Column(DateTime, default=datetime.utcnow)
@@ -0,0 +1,14 @@
1
+ from datetime import datetime
2
+ import uuid
3
+ from sqlalchemy import Column, DateTime, String
4
+ from ..database.DatabaseManager import Base
5
+
6
class WorkerSourcePipelineDetectionEntity(Base):
    """A detection record emitted by a pipeline (image plus serialized data)."""
    __tablename__ = "worker_source_pipeline_detection"
    __bind_key__ = "default"

    # Client-side generated surrogate key.
    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    worker_source_pipeline_id = Column(String, nullable=False)
    image_path = Column(String, nullable=True)
    # Serialized payload (format not visible here — confirm with the writer).
    data = Column(String, nullable=True)
    # NOTE(review): datetime.utcnow stores naive timestamps and is deprecated
    # since Python 3.12 — consider datetime.now(timezone.utc).
    created_at = Column(DateTime, default=datetime.utcnow)
@@ -0,0 +1,32 @@
1
+ from ..repositories.WorkerSourcePipelineRepository import WorkerSourcePipelineRepository
2
+
3
+
4
class PipelineConfigManager:
    """Caches per-pipeline feature configuration fetched from the repository.

    ``self.config`` maps feature name -> {"is_enabled": bool, "value": ...}
    (shape inferred from the accessors below — confirm against
    WorkerSourcePipelineRepository.get_pipeline_configs_by_pipeline_id).
    """

    def __init__(self):
        self.config_repository = WorkerSourcePipelineRepository()
        self.config = {}

    def update(self, pipeline_id):
        """Refresh the cached configuration for the given pipeline."""
        self.config = self.config_repository.get_pipeline_configs_by_pipeline_id(pipeline_id)

    def is_feature_enabled(self, feature_name):
        """
        Generic method to check if a feature is enabled in configuration.

        :param feature_name: Name of the feature to check
        :return: True if feature is enabled, False otherwise
        """
        return (feature_name in self.config and
                self.config[feature_name].get("is_enabled", False))

    def get_feature_config(self, feature_name, default=None) -> dict:
        """
        Get the configuration for a specific feature.

        :param feature_name: Name of the feature
        :param default: Value returned when the feature is disabled or has no
            stored value; a fresh empty dict when omitted.
        :return: Feature configuration
        """
        # Fix: the previous signature used a mutable default (``default={}``),
        # so every caller shared one dict object — mutating the returned
        # fallback leaked into later calls. Allocate a fresh dict per call.
        if default is None:
            default = {}

        if not self.is_feature_enabled(feature_name):
            return default

        return self.config[feature_name].get("value", default)
@@ -0,0 +1,133 @@
1
+ import logging
2
+ import threading
3
+ from concurrent.futures import ThreadPoolExecutor, Future
4
+ from typing import Dict
5
+ from .PipelineProcessor import PipelineProcessor
6
+ from ..streams.VideoStreamManager import VideoStreamManager
7
+
8
class PipelineManager:
    """Manages AI pipeline execution and video stream processing.

    Each started pipeline runs ``PipelineProcessor.process_pipeline`` as a task
    on a shared ThreadPoolExecutor. The manual stop path and the executor's
    completion callback are serialized through ``_stop_lock`` and
    ``_stopping_pipelines`` so that ``on_pipeline_stopped`` fires once per stop.
    """

    def __init__(self, video_manager: VideoStreamManager, on_pipeline_stopped, max_workers=50):
        """
        Args:
            video_manager: Supplies the video streams consumed by processors.
            on_pipeline_stopped: Callable taking a pipeline id; invoked after a
                pipeline finishes or is stopped.
            max_workers: Maximum number of pipelines processed concurrently.
        """
        self.executor = ThreadPoolExecutor(max_workers=max_workers) # Thread pool for parallel execution
        self.pipeline_threads = {} # Stores Future objects {pipeline_id: Future}
        self.pipeline_metadata = {} # Stores actual pipeline data {pipeline_id: metadata}
        self.video_manager = video_manager # Manages video streams
        self.processors: Dict[str, PipelineProcessor] = {} # Stores PipelineProcessor instances per pipeline
        self.running = True
        self._stopping_pipelines = set() # Track pipelines being stopped
        self._stop_lock = threading.Lock() # Lock for thread-safe pipeline stopping
        self.on_pipeline_stopped = on_pipeline_stopped

    def start_pipeline(self, pipeline, model):
        """
        Start a pipeline processing.
        Args:
            pipeline: The pipeline object (contains id, worker_source_id, name, etc.)
            model: The AI model to use for processing.
        """
        pipeline_id = pipeline.id
        worker_source_id = pipeline.worker_source_id

        # Refuse new work once shutdown() has begun.
        if not self.running:
            logging.warning(f"⚠️ Attempt to start pipeline {pipeline_id} after shutdown.")
            return

        if self.is_running(pipeline_id):
            logging.warning(f"⚠️ Pipeline {pipeline_id} is already running.")
            return

        logging.info(f"🚀 Starting Pipeline processing for pipeline: {pipeline_id} | Source: {worker_source_id} ({pipeline.name})")

        processor = PipelineProcessor(pipeline_id, worker_source_id, model, False)
        # Propagate the pipeline's display location to the frame overlay.
        processor.frame_drawer.location_name = pipeline.location_name
        self.processors[pipeline_id] = processor # Store processor instance

        future = self.executor.submit(processor.process_pipeline, self.video_manager)
        self.pipeline_threads[pipeline_id] = future
        self.pipeline_metadata[pipeline_id] = pipeline

        # Add callback to detect when a pipeline finishes
        future.add_done_callback(lambda f: self._handle_pipeline_completion(pipeline_id, f))

    def _handle_pipeline_completion(self, pipeline_id: str, future: Future):
        """
        Handles cleanup when a pipeline finishes processing.

        Runs on the executor's callback thread. Skips the notification when the
        pipeline is already being torn down by stop_pipeline(), which will fire
        on_pipeline_stopped itself.
        """
        with self._stop_lock:
            if pipeline_id in self._stopping_pipelines:
                return # If it's already being stopped manually, don't trigger again

        try:
            if future.cancelled():
                logging.info(f"🚫 Pipeline {pipeline_id} was cancelled.")
            elif future.exception():
                # NOTE(review): exc_info=True logs the *current* exception
                # context, and there is none on this callback thread —
                # consider exc_info=future.exception() instead.
                logging.error(f"❌ Pipeline {pipeline_id} encountered an error: {future.exception()}", exc_info=True)

        except Exception as e:
            logging.error(f"⚠️ Error in handling pipeline {pipeline_id} completion: {e}")

        finally:
            self.on_pipeline_stopped(pipeline_id)

    def stop_pipeline(self, pipeline_id: str):
        """Stop an AI processing pipeline.

        Marks the pipeline as "stopping" first so the completion callback does
        not double-fire on_pipeline_stopped, then tears down the processor,
        future, and metadata.
        """
        with self._stop_lock:
            if pipeline_id in self._stopping_pipelines:
                logging.debug(f"Pipeline {pipeline_id} already being stopped, skipping")
                return
            self._stopping_pipelines.add(pipeline_id)

        try:
            # Stop AI processing
            processor = self.processors.pop(pipeline_id, None)
            if processor:
                processor.stop()

            # Cancel execution thread
            future = self.pipeline_threads.pop(pipeline_id, None)
            if future:
                future.cancel()

            # Remove metadata
            self.pipeline_metadata.pop(pipeline_id, None)

            logging.info(f"✅ Pipeline {pipeline_id} stopped successfully.")

        except Exception as e:
            logging.error(f"❌ Error during pipeline shutdown: {e}")

        finally:
            self._stopping_pipelines.discard(pipeline_id)
            self.on_pipeline_stopped(pipeline_id)

    def get_active_pipelines(self):
        """Returns a list of active pipeline IDs."""
        return list(self.pipeline_metadata.keys())

    def get_pipeline(self, pipeline_id):
        """Returns the actual pipeline metadata (not the Future object)."""
        return self.pipeline_metadata.get(pipeline_id, None)

    def is_running(self, pipeline_id):
        """
        Checks if a pipeline is currently running.

        Args:
            pipeline_id (str): The ID of the pipeline to check.

        Returns:
            bool: True if the pipeline is running, False otherwise.
        """
        return pipeline_id in self.pipeline_threads and not self.pipeline_threads[pipeline_id].done()

    def shutdown(self):
        """Shuts down the pipeline manager gracefully.

        Stops every tracked pipeline, then blocks until the executor's worker
        threads have drained.
        """
        logging.info("🛑 Shutting down PipelineManager...")
        self.running = False

        for pipeline_id in list(self.pipeline_threads.keys()):
            self.stop_pipeline(pipeline_id)

        self.executor.shutdown(wait=True) # Wait for all threads to finish
        logging.info("✅ PipelineManager stopped.")
@@ -0,0 +1,40 @@
1
+ from typing import List, Tuple
2
+ import numpy as np
3
+ from .PipelineConfigManager import PipelineConfigManager
4
+ from ..preprocessing.ImageResizer import ImageResizer
5
+ from ..preprocessing.ImageRoi import ImageRoi
6
+ from ..preprocessing.Preprocessor import Preprocessor
7
+
8
+
9
class PipelinePrepocessor:
    """Applies the configured image preprocessors in order and maps detection
    bounding boxes back into original-image coordinates.

    (The class name keeps the original "Prepocessor" spelling for backward
    compatibility with existing imports.)
    """

    def __init__(self):
        # Order matters: ROI cropping first, then resizing; bbox reversal
        # below walks this same list backwards.
        self.preprocessors: List[Preprocessor] = [
            ImageRoi(),
            ImageResizer()
        ]

    def update(self, config: PipelineConfigManager):
        """Push the latest pipeline configuration to every preprocessor."""
        for preprocessor in self.preprocessors:
            preprocessor.update_config(config)

    def apply(self, image: np.ndarray) -> np.ndarray:
        """Run every preprocessor over a copy of *image* and return the result.

        The input array is never mutated.
        """
        image = image.copy()

        for preprocessor in self.preprocessors:
            image = preprocessor.apply(image)

        return image

    def revert_detections_bboxes(self, detections: list, dimension: Tuple[int, int]) -> list:
        """Map each detection's bbox from preprocessed coordinates back to the
        original image space.

        :param detections: List of dicts, each holding a "bbox" entry; the
            list is mutated in place and also returned.
        :param dimension: Original image dimension handed to each
            preprocessor's ``revert_bboxes`` (axis order depends on the
            preprocessors — confirm against ImageRoi/ImageResizer).
        :return: The same detections list with reverted "bbox" values.
            (Fixed annotation: the previous ``-> np.ndarray`` was wrong.)
        """
        # Simplified guard: "not detections" already covers the empty case
        # the old "len(detections) < 1" re-tested.
        if not detections:
            return detections

        bboxes = np.array([det["bbox"] for det in detections])

        # Undo the preprocessors in reverse application order.
        for preprocessor in reversed(self.preprocessors):
            bboxes = preprocessor.revert_bboxes(bboxes, dimension)

        for det, bbox in zip(detections, bboxes):
            det["bbox"] = bbox

        return detections