nedo-vision-worker-core 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of nedo-vision-worker-core has been flagged as possibly problematic by the registry.
- nedo_vision_worker_core/__init__.py +47 -12
- nedo_vision_worker_core/callbacks/DetectionCallbackManager.py +306 -0
- nedo_vision_worker_core/callbacks/DetectionCallbackTypes.py +150 -0
- nedo_vision_worker_core/callbacks/__init__.py +27 -0
- nedo_vision_worker_core/cli.py +47 -5
- nedo_vision_worker_core/core_service.py +121 -55
- nedo_vision_worker_core/database/DatabaseManager.py +2 -2
- nedo_vision_worker_core/detection/BaseDetector.py +2 -1
- nedo_vision_worker_core/detection/DetectionManager.py +2 -2
- nedo_vision_worker_core/detection/RFDETRDetector.py +23 -5
- nedo_vision_worker_core/detection/YOLODetector.py +18 -5
- nedo_vision_worker_core/detection/detection_processing/DetectionProcessor.py +1 -1
- nedo_vision_worker_core/detection/detection_processing/HumanDetectionProcessor.py +57 -3
- nedo_vision_worker_core/detection/detection_processing/PPEDetectionProcessor.py +173 -10
- nedo_vision_worker_core/models/ai_model.py +23 -2
- nedo_vision_worker_core/pipeline/PipelineProcessor.py +51 -8
- nedo_vision_worker_core/pipeline/PipelineSyncThread.py +32 -0
- nedo_vision_worker_core/repositories/PPEDetectionRepository.py +18 -15
- nedo_vision_worker_core/repositories/RestrictedAreaRepository.py +17 -13
- nedo_vision_worker_core/services/SharedVideoStreamServer.py +276 -0
- nedo_vision_worker_core/services/VideoSharingDaemon.py +808 -0
- nedo_vision_worker_core/services/VideoSharingDaemonManager.py +257 -0
- nedo_vision_worker_core/streams/SharedVideoDeviceManager.py +383 -0
- nedo_vision_worker_core/streams/StreamSyncThread.py +16 -2
- nedo_vision_worker_core/streams/VideoStream.py +208 -246
- nedo_vision_worker_core/streams/VideoStreamManager.py +158 -6
- nedo_vision_worker_core/tracker/TrackerManager.py +25 -31
- nedo_vision_worker_core-0.3.0.dist-info/METADATA +444 -0
- {nedo_vision_worker_core-0.2.0.dist-info → nedo_vision_worker_core-0.3.0.dist-info}/RECORD +32 -25
- nedo_vision_worker_core-0.2.0.dist-info/METADATA +0 -347
- {nedo_vision_worker_core-0.2.0.dist-info → nedo_vision_worker_core-0.3.0.dist-info}/WHEEL +0 -0
- {nedo_vision_worker_core-0.2.0.dist-info → nedo_vision_worker_core-0.3.0.dist-info}/entry_points.txt +0 -0
- {nedo_vision_worker_core-0.2.0.dist-info → nedo_vision_worker_core-0.3.0.dist-info}/top_level.txt +0 -0
nedo_vision_worker_core/detection/detection_processing/PPEDetectionProcessor.py
@@ -6,6 +6,7 @@ from .DetectionProcessor import DetectionProcessor
 from ...pipeline.PipelineConfigManager import PipelineConfigManager
 from ...repositories.PPEDetectionRepository import PPEDetectionRepository
 from ...util.PersonAttributeMatcher import PersonAttributeMatcher
+from ...callbacks import DetectionType, DetectionAttribute, BoundingBox, DetectionData

 class PPEDetectionProcessor(DetectionProcessor):
     code = "ppe"
@@ -15,22 +16,118 @@ class PPEDetectionProcessor(DetectionProcessor):
         "vest": "icons/vest-green.png",
         "no_vest": "icons/vest-red.png"
     }
-
-    violation_labels = ["no_helmet", "no_vest", "no_gloves", "no_goggles", "no_boots"]
-    compliance_labels = ["helmet", "vest", "gloves", "goggles", "boots"]
-    exclusive_labels = [("helmet", "no_helmet"), ("vest", "no_vest"), ("gloves", "no_gloves"), ("goggles", "no_goggles"), ("boots", "no_boots")]
-
+
     def __init__(self):
         self.ppe_storage = PPEDetectionRepository()
         self.types = []
+        self.ppe_groups = {}
+        self.group_thresholds = {}
+        self.main_class_threshold = 0.7
+        self.main_class = "person"
+
+        self.labels = ["helmet", "no_helmet", "vest", "no_vest", "gloves", "no_gloves", "goggles", "no_goggles", "boots", "no_boots"]
+        self.violation_labels = ["no_helmet", "no_vest", "no_gloves", "no_goggles", "no_boots"]
+        self.compliance_labels = ["helmet", "vest", "gloves", "goggles", "boots"]
+        self.exclusive_labels = [("helmet", "no_helmet"), ("vest", "no_vest"), ("gloves", "no_gloves"), ("goggles", "no_goggles"), ("boots", "no_boots")]
+
+    def update(self, config_manager: PipelineConfigManager, ai_model=None):
+        config = config_manager.get_feature_config(self.code, {})
+
+        # Update from AI model
+        if ai_model:
+            self._update_from_ai_model(ai_model)
+
+        # Update PPE type configuration
+        ppe_type_configs = config.get("ppeType", [])
+        self.types = []
+        self.group_thresholds = {}
+
+        for ppe_config in ppe_type_configs:
+            if isinstance(ppe_config, dict):
+                group = ppe_config.get("group")
+                threshold = ppe_config.get("confidenceThreshold", 0.7)
+                if group:
+                    self.types.append(group)
+                    self.group_thresholds[group] = threshold
+            elif isinstance(ppe_config, str):
+                # Backward compatibility
+                self.types.append(ppe_config)
+                self.group_thresholds[ppe_config] = 0.7
+
+        # Update main class threshold
+        self.main_class_threshold = config.get("mainClassConfidenceThreshold", 0.7)
+
+    def _update_from_ai_model(self, ai_model):
+        """Update processor settings from AI model configuration"""
+        if ai_model and hasattr(ai_model, 'ppe_groups') and ai_model.ppe_groups:
+            self.ppe_groups = {group.group_name: group for group in ai_model.ppe_groups}
+            self._build_labels_from_groups()
+
+        if ai_model and hasattr(ai_model, 'main_class') and ai_model.main_class:
+            self.main_class = ai_model.main_class

-    def
-
-
+    def _build_labels_from_groups(self):
+        """Build standard PPE labels from AI model PPE groups"""
+        if not self.ppe_groups:
+            return
+
+        labels = []
+        violation_labels = []
+        compliance_labels = []
+        exclusive_labels = []
+
+        for group_name in self.ppe_groups.items():
+            compliance_class = group_name
+            violation_class = f"no_{group_name}"
+
+            # Build label lists using standard naming
+            labels.extend([compliance_class, violation_class])
+            compliance_labels.append(compliance_class)
+            violation_labels.append(violation_class)
+            exclusive_labels.append((compliance_class, violation_class))
+
+        # Update instance variables with dynamic labels
+        self.labels = labels
+        self.violation_labels = violation_labels
+        self.compliance_labels = compliance_labels
+        self.exclusive_labels = exclusive_labels
+
+    def get_multi_instance_classes(self):
+        """Get PPE classes that can have multiple instances per person"""
+        multi_instance_base = ["boots", "gloves", "goggles"]
+        multi_instance = []
+
+        for label in self.labels:
+            base_label = label.replace("no_", "") if label.startswith("no_") else label
+            if base_label in multi_instance_base:
+                multi_instance.append(label)
+        return multi_instance

     def process(self, detections: List[Dict[str, Any]], dimension: Tuple[int, int]) -> List[Dict[str, Any]]:
-        persons = [d for d in detections if d["label"] ==
-
+        persons = [d for d in detections if d["label"] == self.main_class]
+
+        ppe_attributes = []
+        for detection in detections:
+            label = detection["label"]
+
+            for group_name in self.types:
+                if group_name in self.ppe_groups:
+                    group_config = self.ppe_groups[group_name]
+
+                    original_compliance = group_config.get("compliance")
+                    original_violation = group_config.get("violation")
+
+                    if label in [original_compliance, original_violation]:
+                        if label == original_compliance:
+                            detection["label"] = group_name
+                        elif label == original_violation:
+                            detection["label"] = f"no_{group_name}"
+
+                        ppe_attributes.append(detection)
+                        break
+                elif label == group_name or label == f"no_{group_name}":
+                    ppe_attributes.append(detection)
+                    break

         matched_results = PersonAttributeMatcher.match_persons_with_attributes(
             persons, ppe_attributes, coverage_threshold=0.5
@@ -38,7 +135,73 @@ class PPEDetectionProcessor(DetectionProcessor):

         return matched_results

+    def get_class_thresholds(self):
+        """Get confidence thresholds for each class using original AI model class names"""
+        thresholds = {}
+
+        for group_name, threshold in self.group_thresholds.items():
+            if group_name in self.ppe_groups:
+                group_config = self.ppe_groups[group_name]
+
+                original_compliance = group_config.get("compliance")
+                original_violation = group_config.get("violation")
+
+                if original_compliance:
+                    thresholds[original_compliance] = threshold
+                if original_violation:
+                    thresholds[original_violation] = threshold
+
+                thresholds[group_name] = threshold
+                thresholds[f"no_{group_name}"] = threshold
+            else:
+                thresholds[group_name] = threshold
+                if not group_name.startswith("no_"):
+                    thresholds[f"no_{group_name}"] = threshold
+
+        return thresholds
+
     def save_to_db(self, pipeline_id: str, worker_source_id: str, frame_counter: int, tracked_objects: List[Dict[str, Any]], frame: np.ndarray, frame_drawer: FrameDrawer):
         self.ppe_storage.save_ppe_detection(
             pipeline_id, worker_source_id, frame_counter, tracked_objects, frame, frame_drawer
         )
+
+    @staticmethod
+    def create_detection_data(pipeline_id: str, worker_source_id: str, person_id: str,
+                              detection_id: str, tracked_obj: Dict[str, Any],
+                              image_path: str = "", image_tile_path: str = "",
+                              frame_id: int = 0) -> DetectionData:
+        """Create DetectionData from PPE detection data."""
+        bbox = BoundingBox.from_list(tracked_obj["bbox"])
+
+        attributes = []
+        for attr in tracked_obj.get("attributes", []):
+            attr_bbox = None
+            if "bbox" in attr:
+                attr_bbox = BoundingBox.from_list(attr["bbox"])
+
+            # Determine if this is a violation based on label
+            is_violation = attr["label"].startswith("no_") or attr["label"] in [
+                "no_helmet", "no_vest", "no_gloves", "no_goggles", "no_boots"
+            ]
+
+            attributes.append(DetectionAttribute(
+                label=attr["label"],
+                confidence=attr.get("confidence", 1.0),
+                count=attr.get("count", 0),
+                bbox=attr_bbox,
+                is_violation=is_violation
+            ))
+
+        return DetectionData(
+            detection_type=DetectionType.PPE_DETECTION,
+            detection_id=detection_id,
+            person_id=person_id,
+            pipeline_id=pipeline_id,
+            worker_source_id=worker_source_id,
+            confidence_score=tracked_obj.get("confidence", 1.0),
+            bbox=bbox,
+            attributes=attributes,
+            image_path=image_path,
+            image_tile_path=image_tile_path,
+            frame_id=frame_id
+        )
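For reference, a minimal standalone sketch of how the new per-group "ppeType" configuration maps onto class-level confidence thresholds. The helper names and dict shapes below are illustrative assumptions for this sketch, not part of the package's API:

# Illustrative only: mimics how 0.3.0 turns per-group "ppeType" entries into
# class-level confidence thresholds.
ppe_type_config = [
    {"group": "helmet", "confidenceThreshold": 0.6},
    {"group": "vest", "confidenceThreshold": 0.8},
    "gloves",  # legacy string form, falls back to the 0.7 default
]

def build_group_thresholds(entries):
    """Collect per-group thresholds, accepting both dict and legacy string entries."""
    thresholds = {}
    for entry in entries:
        if isinstance(entry, dict) and entry.get("group"):
            thresholds[entry["group"]] = entry.get("confidenceThreshold", 0.7)
        elif isinstance(entry, str):
            thresholds[entry] = 0.7
    return thresholds

def expand_to_class_thresholds(group_thresholds):
    """Apply each group's threshold to its compliance and violation class names."""
    class_thresholds = {}
    for group, threshold in group_thresholds.items():
        class_thresholds[group] = threshold
        class_thresholds[f"no_{group}"] = threshold
    return class_thresholds

print(expand_to_class_thresholds(build_group_thresholds(ppe_type_config)))
# {'helmet': 0.6, 'no_helmet': 0.6, 'vest': 0.8, 'no_vest': 0.8, 'gloves': 0.7, 'no_gloves': 0.7}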
nedo_vision_worker_core/models/ai_model.py
@@ -1,6 +1,6 @@
 import uuid
+import json
 from sqlalchemy import Column, String, DateTime
-from datetime import datetime
 from ..database.DatabaseManager import Base

 class AIModelEntity(Base):
@@ -15,6 +15,9 @@ class AIModelEntity(Base):
     download_status = Column(String, nullable=True, default="completed") # pending, downloading, completed, failed
     last_download_attempt = Column(DateTime, nullable=True)
     download_error = Column(String, nullable=True)
+    classes = Column(String, nullable=True)
+    ppe_class_groups = Column(String, nullable=True)
+    main_class = Column(String, nullable=True)

     def __repr__(self):
         return (
@@ -38,4 +41,22 @@ class AIModelEntity(Base):

     def has_download_failed(self) -> bool:
         """Check if the model download has failed."""
-        return self.download_status == "failed"
+        return self.download_status == "failed"
+
+    def set_classes(self, classes_list):
+        self.classes = json.dumps(classes_list)
+
+    def get_classes(self):
+        return json.loads(self.classes) if self.classes else []
+
+    def set_ppe_class_groups(self, groups_list):
+        self.ppe_class_groups = json.dumps(groups_list)
+
+    def get_ppe_class_groups(self):
+        return json.loads(self.ppe_class_groups) if self.ppe_class_groups else []
+
+    def set_main_class(self, main_class):
+        self.main_class = main_class
+
+    def get_main_class(self):
+        return self.main_class or None
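The new columns store JSON-encoded strings. A minimal sketch of the round-trip the setter/getter pairs perform; AIModelStub is a plain stand-in, not the real SQLAlchemy entity:

# Minimal sketch of the JSON round-trip the new AIModelEntity helpers perform.
import json

class AIModelStub:
    def __init__(self):
        self.classes = None  # mirrors the new nullable String column

    def set_classes(self, classes_list):
        self.classes = json.dumps(classes_list)

    def get_classes(self):
        return json.loads(self.classes) if self.classes else []

model = AIModelStub()
model.set_classes(["person", "helmet", "no_helmet"])
assert model.get_classes() == ["person", "helmet", "no_helmet"]
assert AIModelStub().get_classes() == []  # an unset column falls back to an empty list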
nedo_vision_worker_core/pipeline/PipelineProcessor.py
@@ -55,6 +55,21 @@ class PipelineProcessor:
         self.debug_repo = WorkerSourcePipelineDebugRepository()
         self.detection_repo = WorkerSourcePipelineDetectionRepository()

+    def load_model(self, model):
+        """
+        Load a new AI model into the detection manager.
+        This allows runtime model updates without restarting the pipeline.
+
+        :param model: The new AI model to load
+        """
+        logging.info(f"🔄 Loading new model for pipeline {self.pipeline_id}: {model.name if model else 'None'}")
+        self.detection_manager.load_model(model)
+
+        # Re-initialize detection processor to use the new model configuration
+        self._update_detection_processor()
+
+        logging.info(f"✅ Model updated for pipeline {self.pipeline_id}")
+
     def _get_detection_processor_code(self):
         for code in self.detection_processor_codes:
             if self.config_manager.is_feature_enabled(code):
@@ -83,8 +98,15 @@ class PipelineProcessor:
             violation_labels=self.detection_processor.violation_labels,
             compliance_labels=self.detection_processor.compliance_labels,
         )
-
-
+        multi_instance_classes = []
+        if hasattr(self.detection_processor, 'get_multi_instance_classes'):
+            multi_instance_classes = self.detection_processor.get_multi_instance_classes()
+
+        self.tracker_manager.update_config(
+            attribute_labels=self.detection_processor.labels,
+            exclusive_attribute_groups=self.detection_processor.exclusive_labels,
+            multi_instance_classes=multi_instance_classes
+        )

     def _update_config(self):
         self.config_manager.update(self.pipeline_id)
@@ -92,9 +114,11 @@ class PipelineProcessor:
         self.detection_interval = self._get_detection_interval()
         self._update_detection_processor()

+        ai_model = self.detection_manager.model_metadata
+
         if self.detection_processor:
             config = self.config_manager.get_feature_config(self.detection_processor.code)
-            self.detection_processor.update(self.config_manager)
+            self.detection_processor.update(self.config_manager, ai_model)
             self.threshold = config.get("minimumDetectionConfidence", 0.7)

             if self.detection_processor.code == HumanDetectionProcessor.code:
@@ -102,8 +126,11 @@ class PipelineProcessor:
         else:
             self.threshold = 0.7
             self.frame_drawer.update_config()
-            self.tracker_manager.
-
+            self.tracker_manager.update_config(
+                attribute_labels=[],
+                exclusive_attribute_groups=[],
+                multi_instance_classes=[]
+            )

     def process_pipeline(self, video_manager: VideoStreamManager):
         """
@@ -195,10 +222,26 @@ class PipelineProcessor:
         dimension = frame.shape[:2]

         processed_frame = self.preprocessor.apply(frame)
-
+
+        class_thresholds = {}
+        ai_model = self.detection_manager.model_metadata
+
+        if self.detection_processor:
+            if self.detection_processor.code == PPEDetectionProcessor.code:
+                class_thresholds.update(self.detection_processor.get_class_thresholds())
+            elif self.detection_processor.code == HumanDetectionProcessor.code:
+                main_threshold = self.detection_processor.get_main_class_threshold(ai_model)
+                if main_threshold and ai_model and ai_model.get_main_class():
+                    class_thresholds[ai_model.get_main_class()] = main_threshold
+
+        detections = self.detection_manager.detect_objects(processed_frame, self.threshold, class_thresholds)
         detections = self.preprocessor.revert_detections_bboxes(detections, dimension)
-
-
+
+        if self.detection_processor:
+            matched_results = self.detection_processor.process(detections, dimension)
+            return self.tracker_manager.track_objects(matched_results)
+        else:
+            return self.tracker_manager.track_objects(detections)


     def _detection_worker(self):
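A simplified sketch of the per-frame threshold selection added above, using hasattr checks in place of the processor-code comparison; the processor and model objects passed in are assumed stand-ins for whatever the pipeline currently holds:

# Simplified sketch of the per-frame threshold selection in PipelineProcessor.
def collect_class_thresholds(detection_processor, ai_model):
    """Pick class-level thresholds based on the active detection processor."""
    thresholds = {}
    if detection_processor is None:
        return thresholds
    if hasattr(detection_processor, "get_class_thresholds"):
        # PPE processor: one threshold per compliance/violation class
        thresholds.update(detection_processor.get_class_thresholds())
    elif hasattr(detection_processor, "get_main_class_threshold"):
        # Human processor: only the model's main class gets a dedicated threshold
        main_threshold = detection_processor.get_main_class_threshold(ai_model)
        if main_threshold and ai_model and ai_model.get_main_class():
            thresholds[ai_model.get_main_class()] = main_threshold
    return thresholds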
nedo_vision_worker_core/pipeline/PipelineSyncThread.py
@@ -171,6 +171,38 @@ class PipelineSyncThread(threading.Thread):
                    else:
                        logging.warning(f"⚠️ Pipeline {pid}: {readiness['reason']}")

+            # Case 7: Model metadata has changed (same ID and version, but different properties)
+            elif local_model and db_model and local_model.id == db_model.id and local_model.version == db_model.version:
+                # Check if model metadata (classes, PPE groups, main_class) has changed
+                if self._has_model_metadata_changed(local_model, db_model):
+                    readiness = ModelReadinessChecker.check_model_readiness(db_model)
+                    if readiness["ready"]:
+                        local_proc.load_model(db_model)
+                        logging.info(f"🔄 Model metadata updated for pipeline {pid}: {db_pipeline.name} "
+                                     f"(same version {db_model.version}, updated properties)")
+                    else:
+                        logging.warning(f"⚠️ Pipeline {pid}: {readiness['reason']}")
+
+    def _has_model_metadata_changed(self, local_model, db_model):
+        """Check if model metadata has changed without version change."""
+        # Compare classes
+        local_classes = set(local_model.get_classes() or [])
+        db_classes = set(db_model.get_classes() or [])
+        if local_classes != db_classes:
+            return True
+
+        # Compare PPE class groups
+        local_ppe_groups = local_model.get_ppe_class_groups() or {}
+        db_ppe_groups = db_model.get_ppe_class_groups() or {}
+        if local_ppe_groups != db_ppe_groups:
+            return True
+
+        # Compare main class
+        if local_model.get_main_class() != db_model.get_main_class():
+            return True
+
+        return False
+
     def _has_pipeline_changed(self, local_pipeline, db_pipeline):
         """Checks if the pipeline configuration has changed."""
         if db_pipeline.pipeline_status_code == "restart":
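A standalone sketch of the metadata comparison that now triggers a hot model reload; ModelStub is a hypothetical stand-in for the local and database model entities:

# Standalone sketch of the _has_model_metadata_changed comparison.
from collections import namedtuple

ModelStub = namedtuple("ModelStub", ["classes", "ppe_class_groups", "main_class"])

def has_model_metadata_changed(local, db):
    """True if classes, PPE class groups, or the main class differ between copies."""
    if set(local.classes or []) != set(db.classes or []):
        return True
    if (local.ppe_class_groups or {}) != (db.ppe_class_groups or {}):
        return True
    return local.main_class != db.main_class

old = ModelStub(["person", "helmet"], {"helmet": ["helmet", "no_helmet"]}, "person")
new = ModelStub(["person", "helmet", "vest"], {"helmet": ["helmet", "no_helmet"]}, "person")
assert has_model_metadata_changed(old, new)      # an extra class triggers a reload
assert not has_model_metadata_changed(old, old)  # identical metadata is left alone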
nedo_vision_worker_core/repositories/PPEDetectionRepository.py
@@ -120,23 +120,26 @@ class PPEDetectionRepository:
         self.session.commit()
         logging.info(f"✅ Inserted detection for Person {person_id}, Attributes: {valid_attributes}")

-        # Trigger detection callback
+        # Trigger detection callback with unified data structure
         try:
             from ..core_service import CoreService
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            from ..detection.detection_processing.PPEDetectionProcessor import PPEDetectionProcessor
+
+            # Create unified detection data using the processor's factory method
+            unified_data = PPEDetectionProcessor.create_detection_data(
+                pipeline_id=pipeline_id,
+                worker_source_id=worker_source_id,
+                person_id=person_id,
+                detection_id=new_detection.id,
+                tracked_obj=tracked_obj,
+                image_path=full_image_path,
+                image_tile_path=cropped_image_path,
+                frame_id=frame_id
+            )
+
+            # Trigger callbacks
+            CoreService.trigger_detection(unified_data)
+
         except Exception as e:
             logging.warning(f"⚠️ Failed to trigger PPE detection callback: {e}")

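On the consuming side, a handler receiving the unified DetectionData payload emitted via CoreService.trigger_detection might look like the sketch below. The attribute names mirror the factory call above, the sample event is made up, and how handlers get registered with the new callbacks package is not shown in this diff:

# Sketch of a consumer-side handler for the unified detection payload.
from types import SimpleNamespace

def on_detection(data):
    """Log any violation attributes carried by a detection event."""
    violations = [attr.label for attr in data.attributes if attr.is_violation]
    if violations:
        print(f"pipeline={data.pipeline_id} person={data.person_id} "
              f"frame={data.frame_id} violations={violations}")

# Hypothetical event, for illustration only.
event = SimpleNamespace(
    pipeline_id="p1", person_id="42", frame_id=7,
    attributes=[SimpleNamespace(label="no_helmet", is_violation=True)],
)
on_detection(event)  # pipeline=p1 person=42 frame=7 violations=['no_helmet']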
nedo_vision_worker_core/repositories/RestrictedAreaRepository.py
@@ -69,19 +69,23 @@ class RestrictedAreaRepository:
         # Trigger detection callback
         try:
             from ..core_service import CoreService
-
-
-
-
-
-
-
-            '
-
-
-
-
-
+            from ..detection.detection_processing.HumanDetectionProcessor import HumanDetectionProcessor
+
+            # Create unified detection data using the processor's factory method
+            unified_data = HumanDetectionProcessor.create_detection_data(
+                pipeline_id=pipeline_id,
+                worker_source_id=worker_source_id,
+                person_id=person_id,
+                detection_id=new_detection.id if hasattr(new_detection, 'id') else f"area_{person_id}_{current_datetime}",
+                tracked_obj=tracked_obj,
+                image_path=full_image_path,
+                image_tile_path=cropped_image_path,
+                frame_id=frame_id
+            )
+
+            # Trigger callbacks
+            CoreService.trigger_detection(unified_data)
+
         except Exception as e:
             logging.warning(f"⚠️ Failed to trigger area violation callback: {e}")
