matrice-analytics 0.1.97__py3-none-any.whl → 0.1.124__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- matrice_analytics/post_processing/__init__.py +22 -0
- matrice_analytics/post_processing/advanced_tracker/config.py +8 -4
- matrice_analytics/post_processing/advanced_tracker/track_class_aggregator.py +128 -0
- matrice_analytics/post_processing/advanced_tracker/tracker.py +22 -1
- matrice_analytics/post_processing/config.py +17 -2
- matrice_analytics/post_processing/core/config.py +107 -1
- matrice_analytics/post_processing/face_reg/face_recognition.py +706 -73
- matrice_analytics/post_processing/face_reg/people_activity_logging.py +25 -14
- matrice_analytics/post_processing/post_processor.py +16 -0
- matrice_analytics/post_processing/usecases/__init__.py +9 -0
- matrice_analytics/post_processing/usecases/crowdflow.py +1088 -0
- matrice_analytics/post_processing/usecases/footfall.py +170 -22
- matrice_analytics/post_processing/usecases/license_plate_monitoring.py +57 -38
- matrice_analytics/post_processing/usecases/parking_lot_analytics.py +1137 -0
- matrice_analytics/post_processing/usecases/vehicle_monitoring.py +30 -4
- matrice_analytics/post_processing/usecases/vehicle_monitoring_drone_view.py +246 -3
- matrice_analytics/post_processing/usecases/vehicle_monitoring_parking_lot.py +36 -3
- matrice_analytics/post_processing/usecases/vehicle_monitoring_wrong_way.py +1021 -0
- matrice_analytics/post_processing/utils/__init__.py +5 -0
- matrice_analytics/post_processing/utils/agnostic_nms.py +759 -0
- matrice_analytics/post_processing/utils/alert_instance_utils.py +55 -7
- matrice_analytics/post_processing/utils/business_metrics_manager_utils.py +25 -2
- matrice_analytics/post_processing/utils/incident_manager_utils.py +12 -1
- matrice_analytics/post_processing/utils/parking_analytics_tracker.py +359 -0
- matrice_analytics/post_processing/utils/wrong_way_tracker.py +670 -0
- {matrice_analytics-0.1.97.dist-info → matrice_analytics-0.1.124.dist-info}/METADATA +1 -1
- {matrice_analytics-0.1.97.dist-info → matrice_analytics-0.1.124.dist-info}/RECORD +30 -23
- {matrice_analytics-0.1.97.dist-info → matrice_analytics-0.1.124.dist-info}/WHEEL +0 -0
- {matrice_analytics-0.1.97.dist-info → matrice_analytics-0.1.124.dist-info}/licenses/LICENSE.txt +0 -0
- {matrice_analytics-0.1.97.dist-info → matrice_analytics-0.1.124.dist-info}/top_level.txt +0 -0
matrice_analytics/post_processing/usecases/footfall.py

```diff
@@ -1,7 +1,10 @@
 from typing import Any, Dict, List, Optional
 from dataclasses import asdict
 import time
+import math
+import numpy as np
 from datetime import datetime, timezone
+from collections import defaultdict, deque
 
 from ..core.base import BaseProcessor, ProcessingContext, ProcessingResult, ConfigProtocol, ResultFormat
 from ..utils import (
```
```diff
@@ -19,6 +22,95 @@ from ..utils import (
 from dataclasses import dataclass, field
 from ..core.config import BaseConfig, AlertConfig, ZoneConfig
 
+class TrajectoryCorrector:
+    """
+    Handles Velocity-Fusion logic to correct model orientation errors.
+    Stores history of track centers and applies EMA smoothing.
+    """
+    def __init__(self):
+        # track_id -> { "centers": deque, "smooth_angle": float }
+        self.history = defaultdict(lambda: {
+            "centers": deque(maxlen=10),
+            "angles": deque(maxlen=5),
+            "smooth_angle": None
+        })
+
+    def get_direction_label(self, angle):
+        """
+        Your custom logic for Front/Back/Left/Right
+        """
+        if angle is None: return "unknown"
+        angle = angle % 360
+        if 45 <= angle < 135: return "back"
+        elif 135 <= angle < 225: return "left"
+        elif 225 <= angle < 315: return "front"
+        else: return "right"
+
+    def update_and_get_label(self, track_id, center, raw_angle_deg):
+        """
+        1. Fixes Angle (+90)
+        2. Calculates Velocity
+        3. Applies EMA Smoothing
+        4. Returns (Smooth_Angle, Label_String)
+        """
+        state = self.history[track_id]
+        state["centers"].append(center)
+
+        # --- FIX 1: ROTATE MODEL ANGLE ---
+        if raw_angle_deg is None: raw_angle_deg = 0.0
+        fixed_raw_angle = (raw_angle_deg + 90) % 360
+        state["angles"].append(fixed_raw_angle)
+
+        # --- FIX 2: CALCULATE VELOCITY ---
+        motion_angle = self._compute_motion_angle(state["centers"])
+
+        # Decide Target Angle
+        if motion_angle is not None:
+            target_angle = motion_angle
+        elif fixed_raw_angle is not None:
+            target_angle = fixed_raw_angle
+        elif state["smooth_angle"] is not None:
+            target_angle = state["smooth_angle"]
+        else:
+            target_angle = 0.0
+
+        # --- FIX 3: EMA SMOOTHING ---
+        alpha = 0.2
+
+        if state["smooth_angle"] is None:
+            state["smooth_angle"] = target_angle
+        else:
+            prev_rad = math.radians(state["smooth_angle"])
+            curr_rad = math.radians(target_angle)
+
+            new_sin = (1 - alpha) * math.sin(prev_rad) + alpha * math.sin(curr_rad)
+            new_cos = (1 - alpha) * math.cos(prev_rad) + alpha * math.cos(curr_rad)
+
+            state["smooth_angle"] = math.degrees(math.atan2(new_sin, new_cos)) % 360
+
+        final_angle = state["smooth_angle"]
+        label = self.get_direction_label(final_angle)
+
+        return final_angle, label
+
+    def _compute_motion_angle(self, centers):
+        if len(centers) < 2:
+            return None
+
+        # Look back 5 frames for stability
+        lookback = min(len(centers), 5)
+        (x_past, y_past) = centers[-lookback]
+        (x_now, y_now) = centers[-1]
+
+        dx = x_now - x_past
+        dy = y_now - y_past
+
+        # THRESHOLD: 0.5 pixels
+        if math.hypot(dx, dy) < 0.5:
+            return None
+
+        return math.degrees(math.atan2(-dy, dx)) % 360
+
 @dataclass
 class FootFallConfig(BaseConfig):
     """Configuration for footfall use case."""
```
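`TrajectoryCorrector` prefers the heading implied by recent motion of the box centre over the model's raw orientation, rotates the raw angle by +90°, and smooths the result with an exponential moving average computed on the angle's sine and cosine so the 0°/360° wrap-around is handled correctly (`atan2(-dy, dx)` converts image coordinates, where y grows downward, into a conventional counter-clockwise angle). Below is a minimal, self-contained sketch of just the circular-EMA and labelling steps; the synthetic headings are invented for illustration and nothing here is imported from the package:

```python
import math

def ema_angle(prev_deg, new_deg, alpha=0.2):
    """Circular EMA: blend angles through sin/cos so values straddling the
    0/360 wrap (e.g. 350 and 10) average near 0, not 180."""
    if prev_deg is None:
        return new_deg % 360
    p, n = math.radians(prev_deg), math.radians(new_deg)
    s = (1 - alpha) * math.sin(p) + alpha * math.sin(n)
    c = (1 - alpha) * math.cos(p) + alpha * math.cos(n)
    return math.degrees(math.atan2(s, c)) % 360

def direction_label(angle_deg):
    """Quadrant binning used by the use case: back / left / front / right."""
    a = angle_deg % 360
    if 45 <= a < 135:
        return "back"
    if 135 <= a < 225:
        return "left"
    if 225 <= a < 315:
        return "front"
    return "right"

smooth = None
for raw in (350, 10, 5, 355):            # noisy headings around the wrap point
    smooth = ema_angle(smooth, raw)
print(round(smooth, 1), direction_label(smooth))   # ~356.0 -> "right"
```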
```diff
@@ -46,6 +138,7 @@ class FootFallConfig(BaseConfig):
 
     target_categories: List[str] = field(
         default_factory=lambda: ['person']
+
     )
 
     def validate(self) -> List[str]:
```
```diff
@@ -79,9 +172,12 @@ class FootFallUseCase(BaseProcessor):
         self.category = "retail"
         self.CASE_TYPE: Optional[str] = 'footfall'
         self.CASE_VERSION: Optional[str] = '1.1'
-        self.target_categories = ['person']
+        self.target_categories = ['person']
         self.smoothing_tracker = None
         self.tracker = None
+
+        # Initialize the Velocity Logic
+        self.trajectory_corrector = TrajectoryCorrector()
         self._total_frame_counter = 0
         self._global_frame_offset = 0
         self._tracking_start_time = None
```
```diff
@@ -105,33 +201,17 @@ class FootFallUseCase(BaseProcessor):
         context.input_format = input_format
         context.confidence_threshold = config.confidence_threshold
 
+        # ... [Keep your standard filtering logic here: confidence, mapping, categories] ...
         if config.confidence_threshold is not None:
             processed_data = filter_by_confidence(data, config.confidence_threshold)
-            self.logger.debug(f"Applied confidence filtering with threshold {config.confidence_threshold}")
         else:
             processed_data = data
-            self.logger.debug("Did not apply confidence filtering since no threshold provided")
 
         if config.index_to_category:
             processed_data = apply_category_mapping(processed_data, config.index_to_category)
-            self.logger.debug("Applied category mapping")
 
         if config.target_categories:
             processed_data = [d for d in processed_data if d.get('category') in self.target_categories]
-            self.logger.debug("Applied category filtering")
-
-        # if config.enable_smoothing:
-        #     if self.smoothing_tracker is None:
-        #         smoothing_config = BBoxSmoothingConfig(
-        #             smoothing_algorithm=config.smoothing_algorithm,
-        #             window_size=config.smoothing_window_size,
-        #             cooldown_frames=config.smoothing_cooldown_frames,
-        #             confidence_threshold=config.confidence_threshold,
-        #             confidence_range_factor=config.smoothing_confidence_range_factor,
-        #             enable_smoothing=True
-        #         )
-        #         self.smoothing_tracker = BBoxSmoothingTracker(smoothing_config)
-        #     processed_data = bbox_smoothing(processed_data, self.smoothing_tracker.config, self.smoothing_tracker)
 
         try:
             from ..advanced_tracker import AdvancedTracker
```
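The pre-filtering above relies on the package's `filter_by_confidence` and `apply_category_mapping` helpers, whose internals are not part of this diff. A rough, stand-alone approximation of the three steps (confidence gate, index-to-name mapping, target-category filter), with sample records invented for illustration:

```python
from typing import Any, Dict, List, Optional

def prefilter(detections: List[Dict[str, Any]],
              confidence_threshold: Optional[float],
              index_to_category: Optional[Dict[int, str]],
              target_categories: List[str]) -> List[Dict[str, Any]]:
    """Approximate equivalent of the footfall pre-filtering pipeline."""
    out = detections
    if confidence_threshold is not None:
        out = [d for d in out if d.get("confidence", 0.0) >= confidence_threshold]
    if index_to_category:
        out = [{**d, "category": index_to_category.get(d.get("category"), d.get("category"))}
               for d in out]
    if target_categories:
        out = [d for d in out if d.get("category") in target_categories]
    return out

sample = [
    {"category": 0, "confidence": 0.91, "track_id": 7},
    {"category": 2, "confidence": 0.88, "track_id": 9},   # mapped to "car", dropped
    {"category": 0, "confidence": 0.20, "track_id": 3},   # below threshold, dropped
]
print(prefilter(sample, 0.5, {0: "person", 2: "car"}, ["person"]))
```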
```diff
@@ -144,9 +224,61 @@ class FootFallUseCase(BaseProcessor):
                     match_thresh=0.8)
                 self.tracker = AdvancedTracker(tracker_config)
                 self.logger.info("Initialized AdvancedTracker for People Counting")
+
+            # 1. Run Standard Tracker (Assigns IDs)
             processed_data = self.tracker.update(processed_data)
+
+            # =========================================================
+            # NEW: INJECT VELOCITY FUSION LOGIC (CORRECTED)
+            # =========================================================
+            for det in processed_data:
+                track_id = det.get("track_id")
+
+                # STREAM-SAFE bbox normalization
+                bbox = det.get("bbox") or det.get("bounding_box")
+
+                if isinstance(bbox, dict):
+                    bbox = [
+                        bbox.get("xmin"),
+                        bbox.get("ymin"),
+                        bbox.get("xmax"),
+                        bbox.get("ymax"),
+                    ]
+
+                # Hard safety guard
+                if not bbox or len(bbox) < 4:
+                    continue
+
+
+                # Check for 'raw_angle' (from predict.py) or 'orientation'
+                raw_angle = det.get("angle", det.get("raw_angle", det.get("orientation", 0.0)))
+
+                if track_id is not None and bbox:
+                    # Calculate Center (cx, cy)
+                    cx = int((bbox[0] + bbox[2]) / 2)
+                    cy = int((bbox[1] + bbox[3]) / 2)
+
+                    # Run Correction (Velocity + EMA + 90 Fix)
+                    # FIX: Unpack both values (Angle AND Label)
+                    final_angle, direction_label = self.trajectory_corrector.update_and_get_label(
+                        track_id,
+                        (cx, cy),
+                        raw_angle
+                    )
+
+                    # OVERWRITE the detection angle
+                    det["orientation"] = final_angle  # For UI
+                    det["angle"] = final_angle        # For Analytics
+
+                    # FIX: SAVE THE DIRECTION LABEL
+                    det["direction"] = direction_label  # "front", "back", etc.
+            # =========================================================
+
         except Exception as e:
-            self.logger.warning(f"AdvancedTracker failed: {e}")
+            self.logger.warning(f"AdvancedTracker/Velocity failed: {e}")
+
+        # ... [The rest of your process method remains exactly the same] ...
+
 
         self._update_tracking_state(processed_data)
         self._total_frame_counter += 1
```
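The "stream-safe" normalisation in the loop above accepts the box either as a `[xmin, ymin, xmax, ymax]` sequence under `bbox` or as a dict under `bounding_box`, and skips the detection when neither form is usable before computing the centre that feeds `TrajectoryCorrector`. A small sketch of that normalisation (the sample detection is invented; field names follow the diff):

```python
from typing import Any, List, Optional, Tuple

def normalize_bbox(det: dict) -> Optional[List[float]]:
    """Return [xmin, ymin, xmax, ymax] from either 'bbox' or 'bounding_box',
    whether stored as a list/tuple or as a dict of corner keys."""
    bbox: Any = det.get("bbox") or det.get("bounding_box")
    if isinstance(bbox, dict):
        bbox = [bbox.get("xmin"), bbox.get("ymin"), bbox.get("xmax"), bbox.get("ymax")]
    if not bbox or len(bbox) < 4 or any(v is None for v in bbox[:4]):
        return None                     # hard safety guard, as in the diff
    return list(bbox[:4])

def bbox_center(bbox: List[float]) -> Tuple[int, int]:
    """Integer centre (cx, cy) used as the track's position sample."""
    return int((bbox[0] + bbox[2]) / 2), int((bbox[1] + bbox[3]) / 2)

det = {"bounding_box": {"xmin": 100, "ymin": 40, "xmax": 180, "ymax": 220}}
box = normalize_bbox(det)
print(box, bbox_center(box))            # [100, 40, 180, 220] (140, 130)
```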
```diff
@@ -461,8 +593,21 @@ class FootFallUseCase(BaseProcessor):
             raw_track_id = det.get("track_id")
             if cat not in self.target_categories or raw_track_id is None:
                 continue
-            bbox = det.get("
+            bbox = det.get("bbox") or det.get("bounding_box")
+
+            if isinstance(bbox, dict):
+                bbox = [
+                    bbox.get("xmin"),
+                    bbox.get("ymin"),
+                    bbox.get("xmax"),
+                    bbox.get("ymax"),
+                ]
+
+            if not bbox or len(bbox) < 4:
+                continue
+
             canonical_id = self._merge_or_register_track(raw_track_id, bbox)
+
             det["track_id"] = canonical_id
             self._per_category_total_track_ids.setdefault(cat, set()).add(canonical_id)
             self._current_frame_track_ids[cat].add(canonical_id)
```
```diff
@@ -646,7 +791,7 @@ class FootFallUseCase(BaseProcessor):
     def _count_categories(self, detections: list, config: FootFallConfig) -> dict:
         counts = {}
         for det in detections:
-            cat = det.get(
+            cat = det.get("direction") or "unknown"
             counts[cat] = counts.get(cat, 0) + 1
         return {
             "total_count": sum(counts.values()),
```
```diff
@@ -655,9 +800,12 @@ class FootFallUseCase(BaseProcessor):
             {
                 "bounding_box": det.get("bounding_box"),
                 "category": det.get("category"),
+                "direction": det.get("direction"),
                 "confidence": det.get("confidence"),
                 "track_id": det.get("track_id"),
-                "frame_id": det.get("frame_id")
+                "frame_id": det.get("frame_id"),
+                "angle": det.get("angle"),
+                "orientation": det.get("orientation")  # for UI arrows
             }
             for det in detections
         ]
```
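With these changes the per-frame counts in `_count_categories` are keyed by the smoothed movement direction rather than by detector category, and each emitted detection record carries the `direction` label alongside the smoothed `angle`/`orientation`. A hypothetical single record for illustration (all values invented; only the field names are taken from the diff):

```python
# counts are keyed by det["direction"], falling back to "unknown"
counts = {"front": 2, "back": 1, "unknown": 1}
total_count = sum(counts.values())      # 4

detection_record = {
    "bounding_box": {"xmin": 100, "ymin": 40, "xmax": 180, "ymax": 220},
    "category": "person",
    "direction": "front",               # label from TrajectoryCorrector
    "confidence": 0.91,
    "track_id": 7,
    "frame_id": 1245,
    "angle": 268.4,                     # smoothed angle in degrees
    "orientation": 268.4,               # same value, consumed by the UI arrows
}
```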
matrice_analytics/post_processing/usecases/license_plate_monitoring.py

```diff
@@ -535,6 +535,10 @@ class LicensePlateMonitorUseCase(BaseProcessor):
         self.plate_logger: Optional[LicensePlateMonitorLogger] = None
         self._logging_enabled = True  # False //ToDo: DISABLED FOR NOW, ENABLED FOR PRODUCTION. ##
         self._plate_logger_initialized = False  # Track if plate logger has been initialized
+
+        # Track which track_ids have been logged to avoid duplicate logging
+        # Only log confirmed/consensus plates, not every OCR prediction
+        self._logged_track_ids: set = set()
 
         # Initialize instant alert manager (will be lazily initialized on first process() call)
         self.alert_manager: Optional[ALERT_INSTANCE] = None
```
```diff
@@ -942,6 +946,8 @@ class LicensePlateMonitorUseCase(BaseProcessor):
         self._unique_plate_texts = {}
         self.helper = {}
         self.unique_plate_track = {}
+        # Reset logged track_ids to allow fresh logging
+        self._logged_track_ids = set()
         self.logger.info("Plate tracking state reset")
 
     def reset_all_tracking(self) -> None:
```
```diff
@@ -1106,7 +1112,7 @@ class LicensePlateMonitorUseCase(BaseProcessor):
             # Send to alert manager for evaluation
             try:
                 self.logger.info(f"[ALERT_DEBUG] Sending detection event #{i+1} to alert manager...")
-                self.alert_manager.process_detection_event(detection_event)
+                self.alert_manager.process_detection_event(detection_event, stream_info)
                 self.logger.info(f"[ALERT_DEBUG] ✓ Sent detection event to alert manager: plate={plate_text}, confidence={confidence:.2f}")
                 sent_count += 1
             except Exception as e:
```
```diff
@@ -1147,13 +1153,19 @@ class LicensePlateMonitorUseCase(BaseProcessor):
 
     async def _log_detected_plates(self, detections: List[Dict[str, Any]], config: LicensePlateMonitorConfig,
                                    stream_info: Optional[Dict[str, Any]], image_bytes: Optional[bytes] = None) -> None:
-        """
+        """
+        Log confirmed/consensus plates to RPC server.
+
+        Only logs plates that have reached consensus (are in _tracked_plate_texts),
+        and only logs each track_id once to avoid duplicate logging of garbage OCR predictions.
+        Uses the confirmed consensus plate text, not the raw frame-by-frame OCR output.
+        """
         # Enhanced logging for diagnostics
         print(f"[LP_LOGGING] Starting plate logging check - detections count: {len(detections)}")
         self.logger.info(f"[LP_LOGGING] Starting plate logging check - detections count: {len(detections)}")
-        self.logger.info(f"[LP_LOGGING] Logging enabled: {self._logging_enabled}, Plate logger exists: {self.plate_logger is not None}
+        self.logger.info(f"[LP_LOGGING] Logging enabled: {self._logging_enabled}, Plate logger exists: {self.plate_logger is not None}")
+        self.logger.info(f"[LP_LOGGING] Confirmed plates (tracked): {len(self._tracked_plate_texts)}, Already logged tracks: {len(self._logged_track_ids)}")
 
-        #self._logging_enabled = False # ToDo: DISABLED FOR NOW, ENABLED FOR PRODUCTION
         if not self._logging_enabled:
             print("[LP_LOGGING] Plate logging is DISABLED")
             self.logger.warning("[LP_LOGGING] Plate logging is DISABLED - logging_enabled flag is False")
```
```diff
@@ -1164,11 +1176,6 @@ class LicensePlateMonitorUseCase(BaseProcessor):
             self.logger.warning("[LP_LOGGING] Plate logging SKIPPED - plate_logger is not initialized (lpr_server_id may not be configured)")
             return
 
-        # if not stream_info:
-        #     print("[LP_LOGGING] Plate logging SKIPPED - stream_info is None")
-        #     self.logger.warning("[LP_LOGGING] Plate logging SKIPPED - stream_info is None")
-        #     return
-
         print("[LP_LOGGING] All pre-conditions met, proceeding with plate logging")
         self.logger.info(f"[LP_LOGGING] All pre-conditions met, proceeding with plate logging")
 
```
```diff
@@ -1195,35 +1202,45 @@ class LicensePlateMonitorUseCase(BaseProcessor):
                 else:
                     self.logger.warning(f"[LP_LOGGING] Failed to decode image bytes")
             except Exception as e:
-                #pass
                 self.logger.error(f"[LP_LOGGING] Exception while encoding frame image: {e}", exc_info=True)
         else:
             self.logger.info(f"[LP_LOGGING] No image_bytes provided, sending without image")
 
-        #
-
-
+        # Only log CONFIRMED/CONSENSUS plates from _tracked_plate_texts
+        # Avoid logging every raw OCR prediction - only log final confirmed plate per track_id
+        plates_to_log = {}  # track_id -> consensus_plate_text
+
         for det in detections:
-
-            if
-                detections_without_text += 1
+            track_id = det.get('track_id')
+            if track_id is None:
                 continue
-
+
+            # Skip if this track_id has already been logged
+            if track_id in self._logged_track_ids:
+                self.logger.debug(f"[LP_LOGGING] Skipping track_id={track_id} - already logged")
+                continue
+
+            # Only log if this track_id has a confirmed/consensus plate
+            if track_id in self._tracked_plate_texts:
+                consensus_plate = self._tracked_plate_texts[track_id]
+                if consensus_plate:
+                    plates_to_log[track_id] = consensus_plate
+                    self.logger.debug(f"[LP_LOGGING] Found confirmed plate for track_id={track_id}: {consensus_plate}")
 
-
-
-
-
+        confirmed_count = len(plates_to_log)
+        raw_ocr_count = sum(1 for d in detections if d.get('plate_text'))
+        print(f"[LP_LOGGING] Confirmed plates to log: {confirmed_count} (from {raw_ocr_count} raw OCR detections)")
+        self.logger.info(f"[LP_LOGGING] Confirmed plates to log: {confirmed_count}, Raw OCR detections: {raw_ocr_count}")
+        self.logger.info(f"[LP_LOGGING] Plates: {list(plates_to_log.values())}")
 
-        # Log each
+        # Log each confirmed plate (respecting cooldown)
         if plates_to_log:
-            print(f"[LP_LOGGING] Logging {len(plates_to_log)} plates with cooldown={config.plate_log_cooldown}s")
-            self.logger.info(f"[LP_LOGGING] Logging {len(plates_to_log)} plates with cooldown={config.plate_log_cooldown}s")
+            print(f"[LP_LOGGING] Logging {len(plates_to_log)} confirmed plates with cooldown={config.plate_log_cooldown}s")
+            self.logger.info(f"[LP_LOGGING] Logging {len(plates_to_log)} confirmed plates with cooldown={config.plate_log_cooldown}s")
             try:
-
-
-
-                self.logger.info(f"[LP_LOGGING] Processing plate: {plate_text}")
+                for track_id, plate_text in plates_to_log.items():
+                    print(f"[LP_LOGGING] Processing confirmed plate: {plate_text} (track_id={track_id})")
+                    self.logger.info(f"[LP_LOGGING] Processing confirmed plate: {plate_text} (track_id={track_id})")
                     try:
                         result = await self.plate_logger.log_plate(
                             plate_text=plate_text,
```
```diff
@@ -1232,25 +1249,26 @@ class LicensePlateMonitorUseCase(BaseProcessor):
                             image_data=image_data,
                             cooldown=config.plate_log_cooldown
                         )
-
-
-
+                        if result:
+                            # Mark this track_id as logged to avoid duplicate logging
+                            self._logged_track_ids.add(track_id)
+                            print(f"[LP_LOGGING] Plate {plate_text}: SENT (track_id={track_id} marked as logged)")
+                            self.logger.info(f"[LP_LOGGING] Plate {plate_text}: SENT (track_id={track_id} marked as logged)")
+                        else:
+                            print(f"[LP_LOGGING] Plate {plate_text}: SKIPPED (cooldown)")
+                            self.logger.info(f"[LP_LOGGING] Plate {plate_text}: SKIPPED (cooldown)")
                     except Exception as e:
-                        #pass
                         print(f"[LP_LOGGING] ERROR - Plate {plate_text} failed: {e}")
                         self.logger.error(f"[LP_LOGGING] Plate {plate_text} raised exception: {e}", exc_info=True)
 
                 print("[LP_LOGGING] Plate logging complete")
-                self.logger.info(f"[LP_LOGGING] Plate logging complete")
+                self.logger.info(f"[LP_LOGGING] Plate logging complete - {len(self._logged_track_ids)} total tracks logged so far")
             except Exception as e:
-                print(f"[LP_LOGGING] CRITICAL ERROR during plate logging: {e}")
-
                 print(f"[LP_LOGGING] CRITICAL ERROR during plate logging: {e}")
                 self.logger.error(f"[LP_LOGGING] CRITICAL ERROR during plate logging: {e}", exc_info=True)
-                pass
         else:
-            print("[LP_LOGGING] No plates to log")
-            self.logger.info(f"[LP_LOGGING] No plates to log (
+            print("[LP_LOGGING] No confirmed plates to log (plates may still be reaching consensus)")
+            self.logger.info(f"[LP_LOGGING] No confirmed plates to log (waiting for consensus)")
 
     async def process(self, data: Any, config: ConfigProtocol, input_bytes: Optional[bytes] = None,
                       context: Optional[ProcessingContext] = None, stream_info: Optional[Dict[str, Any]] = None) -> ProcessingResult:
```
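The logging path above forwards only consensus plate texts (those already present in `_tracked_plate_texts`) and remembers which track IDs have been sent, so each plate is logged at most once; the time-based cooldown itself is enforced inside `log_plate`. A stripped-down sketch of that selection policy, with stand-in attribute names and invented plate values (not the actual class):

```python
class ConsensusPlateLogSketch:
    """Illustration only: pick each track's confirmed plate text at most once."""

    def __init__(self):
        self.tracked_plate_texts = {}   # track_id -> confirmed (consensus) text
        self.logged_track_ids = set()   # track_ids already forwarded

    def collect_plates_to_log(self, detections):
        plates_to_log = {}
        for det in detections:
            track_id = det.get("track_id")
            if track_id is None or track_id in self.logged_track_ids:
                continue                             # unknown or already logged
            consensus = self.tracked_plate_texts.get(track_id)
            if consensus:
                plates_to_log[track_id] = consensus  # use consensus, not raw OCR
        return plates_to_log

    def mark_sent(self, track_id):
        self.logged_track_ids.add(track_id)

sketch = ConsensusPlateLogSketch()
sketch.tracked_plate_texts = {11: "KA01AB1234"}      # track 11 has reached consensus
frame = [
    {"track_id": 11, "plate_text": "KA01A81Z34"},    # noisy raw OCR, ignored
    {"track_id": 12, "plate_text": "MH12XY0001"},    # no consensus yet, skipped
]
print(sketch.collect_plates_to_log(frame))           # {11: 'KA01AB1234'}
sketch.mark_sent(11)
print(sketch.collect_plates_to_log(frame))           # {} - not logged twice
```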
```diff
@@ -1298,6 +1316,7 @@ class LicensePlateMonitorUseCase(BaseProcessor):
 
         input_format = match_results_structure(data)
         context.input_format = input_format
+        config.confidence_threshold = 0.1
         context.confidence_threshold = config.confidence_threshold
         self._ocr_mode = config.ocr_mode
         self.logger.info(f"Processing license plate monitoring with format: {input_format.value}")
```
|