matrice-analytics 0.1.106__py3-none-any.whl → 0.1.124__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in its public registry, and is provided for informational purposes only.
- matrice_analytics/post_processing/__init__.py +22 -0
- matrice_analytics/post_processing/config.py +15 -0
- matrice_analytics/post_processing/core/config.py +107 -1
- matrice_analytics/post_processing/face_reg/face_recognition.py +2 -2
- matrice_analytics/post_processing/post_processor.py +16 -0
- matrice_analytics/post_processing/usecases/__init__.py +9 -0
- matrice_analytics/post_processing/usecases/crowdflow.py +1088 -0
- matrice_analytics/post_processing/usecases/footfall.py +103 -62
- matrice_analytics/post_processing/usecases/license_plate_monitoring.py +2 -1
- matrice_analytics/post_processing/usecases/parking_lot_analytics.py +1137 -0
- matrice_analytics/post_processing/usecases/vehicle_monitoring.py +30 -4
- matrice_analytics/post_processing/usecases/vehicle_monitoring_drone_view.py +33 -6
- matrice_analytics/post_processing/usecases/vehicle_monitoring_parking_lot.py +18 -2
- matrice_analytics/post_processing/usecases/vehicle_monitoring_wrong_way.py +1021 -0
- matrice_analytics/post_processing/utils/alert_instance_utils.py +18 -5
- matrice_analytics/post_processing/utils/business_metrics_manager_utils.py +25 -2
- matrice_analytics/post_processing/utils/incident_manager_utils.py +12 -1
- matrice_analytics/post_processing/utils/parking_analytics_tracker.py +359 -0
- matrice_analytics/post_processing/utils/wrong_way_tracker.py +670 -0
- {matrice_analytics-0.1.106.dist-info → matrice_analytics-0.1.124.dist-info}/METADATA +1 -1
- {matrice_analytics-0.1.106.dist-info → matrice_analytics-0.1.124.dist-info}/RECORD +24 -19
- {matrice_analytics-0.1.106.dist-info → matrice_analytics-0.1.124.dist-info}/WHEEL +0 -0
- {matrice_analytics-0.1.106.dist-info → matrice_analytics-0.1.124.dist-info}/licenses/LICENSE.txt +0 -0
- {matrice_analytics-0.1.106.dist-info → matrice_analytics-0.1.124.dist-info}/top_level.txt +0 -0
matrice_analytics/post_processing/utils/alert_instance_utils.py

@@ -497,12 +497,13 @@ class ALERT_INSTANCE:
             self.logger.error(f"[ALERT_DEBUG] ❌ EXCEPTION in _handle_config_message: {e}", exc_info=True)
             self.logger.error(f"[ALERT_DEBUG] Failed config_data: {config_data}")

-    def process_detection_event(self, detection_payload: Dict[str, Any]):
+    def process_detection_event(self, detection_payload: Dict[str, Any], stream_info: Optional[Dict[str, Any]] = None):
         """
         Process a detection event and evaluate against active alerts.

         Args:
             detection_payload: Detection event data
+            stream_info: Stream metadata containing stream_time and other info
         """
         try:
             self.logger.info(f"[ALERT_DEBUG] ========== PROCESSING DETECTION EVENT ==========")
@@ -550,7 +551,7 @@ class ALERT_INSTANCE:
                 acquired, prev_time = self._try_acquire_cooldown(alert.instant_alert_id, detection_key)
                 if acquired:
                     self.logger.info(f"[ALERT_DEBUG] ✓ Alert matched and cooldown acquired, publishing trigger...")
-                    publish_ok = self._publish_trigger(alert, detection_payload)
+                    publish_ok = self._publish_trigger(alert, detection_payload, stream_info)
                     if not publish_ok:
                         # Rollback cooldown if publish failed
                         self._rollback_cooldown(alert.instant_alert_id, detection_key, prev_time)
@@ -885,13 +886,13 @@ class ALERT_INSTANCE:

         return False

-    def _publish_trigger(self, alert: AlertConfig, detection: Dict[str, Any]) -> bool:
+    def _publish_trigger(self, alert: AlertConfig, detection: Dict[str, Any], stream_info: Optional[Dict[str, Any]] = None) -> bool:
         """Publish trigger message to backend. Returns True if published successfully."""
         self.logger.info(f"[ALERT_DEBUG] ========== PUBLISHING TRIGGER ==========")
         self.logger.info(f"[ALERT_DEBUG] Alert ID: {alert.instant_alert_id}")
         self.logger.info(f"[ALERT_DEBUG] Alert Name: {alert.alert_name}")

-        trigger_message = self._build_trigger_message(alert, detection)
+        trigger_message = self._build_trigger_message(alert, detection, stream_info)

         self.logger.info(f"[ALERT_DEBUG] Built trigger message: {trigger_message}")

@@ -925,11 +926,22 @@ class ALERT_INSTANCE:
     def _build_trigger_message(
         self,
         alert: AlertConfig,
-        detection: Dict[str, Any]
+        detection: Dict[str, Any],
+        stream_info: Optional[Dict[str, Any]] = None
     ) -> Dict[str, Any]:
         """Build trigger message in exact format specified in documentation."""
         detection_type_raw = detection.get("detectionType", "").lower()
         triggered_at = datetime.now(timezone.utc).isoformat()
+
+        # Extract stream_time from stream_info
+        stream_time = ""
+        if stream_info:
+            stream_time = stream_info.get("stream_time", "")
+            if not stream_time:
+                # Try alternative paths
+                input_settings = stream_info.get("input_settings", {})
+                if isinstance(input_settings, dict):
+                    stream_time = input_settings.get("stream_time", "")

         context_data = {
             "detectionType": detection_type_raw,
@@ -1003,6 +1015,7 @@ class ALERT_INSTANCE:
             "camera_id": alert.camera_id,
             "frame_id": detection.get("frame_id", ""),
             "triggered_at": triggered_at,
+            "stream_time": stream_time,  # Add stream_time from stream_info
             "context_data": context_data,
             "contextData": context_data_enhanced,  # Enhanced contextData with bbox, conf, timestamp, type
         }
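These hunks thread the new `stream_info` argument from `process_detection_event` through `_publish_trigger` into `_build_trigger_message`, where `stream_time` is read from the top level of `stream_info` and, failing that, from `input_settings`. A minimal standalone sketch of that fallback, mirroring the diff above (the function name and sample values are illustrative, not part of the package); the same extraction pattern reappears in the business metrics and incident managers below:

```python
from typing import Any, Dict, Optional

def extract_stream_time(stream_info: Optional[Dict[str, Any]]) -> str:
    """Sketch of the stream_time lookup added in _build_trigger_message."""
    stream_time = ""
    if stream_info:
        # Primary location: top-level stream_time
        stream_time = stream_info.get("stream_time", "")
        if not stream_time:
            # Fallback: input_settings.stream_time
            input_settings = stream_info.get("input_settings", {})
            if isinstance(input_settings, dict):
                stream_time = input_settings.get("stream_time", "")
    return stream_time

# Illustrative call: the fallback path is taken when the top-level key is absent.
print(extract_stream_time({"input_settings": {"stream_time": "2024-01-01T12:00:00+00:00"}}))
```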
matrice_analytics/post_processing/utils/business_metrics_manager_utils.py

@@ -90,6 +90,7 @@ class CameraMetricsState:
     application_id: str = ""
     location_id: str = ""
     location_name: str = ""
+    stream_time: str = ""  # Store most recent stream_time
     metrics: Dict[str, MetricAggregator] = field(default_factory=dict)
     last_push_time: float = field(default_factory=time.time)

@@ -530,6 +531,16 @@ class BUSINESS_METRICS_MANAGER:
         final_application_id = camera_info.get("application_id") or factory_application_id or ""
         final_location_id = camera_info.get("location_id") or ""

+        # Extract stream_time from stream_info
+        final_stream_time = ""
+        if stream_info:
+            final_stream_time = stream_info.get("stream_time", "")
+            if not final_stream_time:
+                # Try alternative paths
+                input_settings = stream_info.get("input_settings", {})
+                if isinstance(input_settings, dict):
+                    final_stream_time = input_settings.get("stream_time", "")
+
         # Fetch location_name from API using location_id
         final_location_name = self._fetch_location_name(final_location_id)

@@ -549,7 +560,8 @@
                 app_deployment_id=final_app_deployment_id,
                 application_id=final_application_id,
                 location_id=final_location_id,
-                location_name=final_location_name
+                location_name=final_location_name,
+                stream_time=final_stream_time
             )
             self.logger.info(
                 f"[BUSINESS_METRICS_MANAGER] ✓ Created new state for camera: {final_camera_id}"
@@ -566,12 +578,17 @@
             self.logger.debug(f"[BUSINESS_METRICS_MANAGER] Updated app_deployment_id to: {final_app_deployment_id}")
         if final_application_id and not state.application_id:
             state.application_id = final_application_id
+            self.logger.debug(f"[BUSINESS_METRICS_MANAGER] Updated application_id to: {final_application_id}")
         if final_location_id and not state.location_id:
             state.location_id = final_location_id
             self.logger.debug(f"[BUSINESS_METRICS_MANAGER] Updated location_id to: {final_location_id}")
         if final_location_name and not state.location_name:
             state.location_name = final_location_name
             self.logger.debug(f"[BUSINESS_METRICS_MANAGER] Updated location_name to: {final_location_name}")
+        # Always update stream_time with most recent value
+        if final_stream_time:
+            state.stream_time = final_stream_time
+            self.logger.debug(f"[BUSINESS_METRICS_MANAGER] Updated stream_time to: {final_stream_time}")

         # Add each metric value to aggregator
         metrics_added = 0
@@ -673,12 +690,18 @@
         # Build the message
         aggregated_metrics = state.get_aggregated_metrics()

+        # Get application_id from factory if not in state (fallback)
+        final_application_id = state.application_id
+        if not final_application_id and self._factory_ref:
+            final_application_id = self._factory_ref._application_id or ""
+
         message = {
             "camera_id": state.camera_id,
             "camera_name": state.camera_name,
             "app_deployment_id": state.app_deployment_id,
-            "application_id":
+            "application_id": final_application_id,  # Ensure application_id is included
             "location_name": state.location_name,
+            "stream_time": state.stream_time,  # Add stream_time from state
             "business_metrics": aggregated_metrics,
             "timestamp": datetime.now(timezone.utc).isoformat(),
             "aggregation_interval_seconds": self.aggregation_interval
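After these changes the aggregated metrics message carries an `application_id` (falling back to the factory's `_application_id` when the camera state has none) and the most recent `stream_time` stored on the camera state. A sketch of the resulting message shape, with only the keys taken from the diff above and all values illustrative:

```python
# Illustrative payload; key names follow the hunk above, values are made up.
message = {
    "camera_id": "cam-01",
    "camera_name": "Entrance",
    "app_deployment_id": "dep-123",
    "application_id": "app-456",                  # state value or factory fallback
    "location_name": "Main Lobby",
    "stream_time": "2024-01-01T12:00:00+00:00",   # most recent value kept on CameraMetricsState
    "business_metrics": {"footfall_count": 42},   # output of state.get_aggregated_metrics()
    "timestamp": "2024-01-01T12:00:05+00:00",
    "aggregation_interval_seconds": 60,
}
```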
matrice_analytics/post_processing/utils/incident_manager_utils.py

@@ -1151,6 +1151,16 @@ class INCIDENT_MANAGER:
         # Extract frame_id from stream_info
         final_frame_id = stream_camera_info.get("frame_id", "")

+        # Extract stream_time from stream_info
+        stream_time = ""
+        if stream_info:
+            stream_time = stream_info.get("stream_time", "")
+            if not stream_time:
+                # Try alternative paths
+                input_settings = stream_info.get("input_settings", {})
+                if isinstance(input_settings, dict):
+                    stream_time = input_settings.get("stream_time", "")
+
         # Fetch location_name from API using location_id
         location_id = stream_camera_info.get("location_id", "")
         final_location_name = self._fetch_location_name(location_id)
@@ -1184,6 +1194,7 @@ class INCIDENT_MANAGER:
             "camera_name": final_camera_name,
             "frame_id": final_frame_id,
             "location_name": final_location_name,
+            "stream_time": stream_time,  # Add stream_time from stream_info
             "incidents": [incident]
         }

@@ -1521,7 +1532,7 @@ class IncidentManagerFactory:
                 f"[INCIDENT_MANAGER_FACTORY] Detected Cloud environment "
                 f"(Public IP={public_ip}, Server IP={server_host})"
             )
-
+        is_localhost = True  # TODO: HARD CODED FOR LOCALHOST AND REDIS ONLY
         redis_client = None
         kafka_client = None
matrice_analytics/post_processing/utils/parking_analytics_tracker.py (new file)

@@ -0,0 +1,359 @@
+"""
+Parking Analytics Tracker
+
+This module provides dwell time and parking status tracking for vehicles.
+Tracks movement patterns to determine if vehicles are parked or moving.
+"""
+
+from typing import Any, Dict, List, Optional, Tuple
+from dataclasses import dataclass, field
+from collections import deque
+import math
+import logging
+
+
+@dataclass
+class VehicleParkingState:
+    """Per-vehicle parking state tracking"""
+    track_id: int
+    category: str
+    first_seen_frame: int
+    first_seen_timestamp: str
+    last_seen_frame: int
+    last_seen_timestamp: str
+
+    # Position tracking for movement detection
+    position_history: deque = field(default_factory=lambda: deque(maxlen=60))
+    bbox_size_history: deque = field(default_factory=lambda: deque(maxlen=60))
+
+    # Parking status
+    is_parked: bool = False
+    parked_since_frame: Optional[int] = None
+    parked_since_timestamp: Optional[str] = None
+    total_parked_frames: int = 0
+
+    # Movement metrics
+    movement_variance: float = 0.0
+
+    @property
+    def dwell_time_frames(self) -> int:
+        """Total frames vehicle has been tracked"""
+        return self.last_seen_frame - self.first_seen_frame + 1
+
+    @property
+    def parked_time_frames(self) -> int:
+        """Total frames vehicle has been parked"""
+        return self.total_parked_frames if self.is_parked else 0
+
+
+class ParkingAnalyticsTracker:
+    """
+    Tracks parking duration and status for vehicles.
+
+    Determines if vehicles are parked based on movement patterns:
+    - Tracks bbox position over a sliding window (default 60 frames)
+    - Calculates movement as percentage of bbox size
+    - Marks vehicle as parked after threshold duration of stationary behavior
+    """
+
+    def __init__(
+        self,
+        parked_threshold_frames: int = 150,  # 5s @ 30fps
+        movement_threshold_percent: float = 5.0,
+        movement_window_frames: int = 60,
+        fps: float = 30.0
+    ):
+        """
+        Initialize parking analytics tracker.
+
+        Args:
+            parked_threshold_frames: Frames vehicle must be stationary to be marked as parked
+            movement_threshold_percent: Max movement % of bbox size to be considered stationary
+            movement_window_frames: Number of frames to analyze for movement
+            fps: Frames per second for time calculations
+        """
+        self.parked_threshold_frames = parked_threshold_frames
+        self.movement_threshold_percent = movement_threshold_percent
+        self.movement_window_frames = movement_window_frames
+        self.fps = fps
+
+        self.active_tracks: Dict[int, VehicleParkingState] = {}
+        self.removed_tracks: deque = deque(maxlen=100)
+
+        self.logger = logging.getLogger(__name__)
+
+        self.logger.info(
+            f"ParkingAnalyticsTracker initialized: "
+            f"parked_threshold={parked_threshold_frames}f ({parked_threshold_frames/fps:.1f}s), "
+            f"movement_threshold={movement_threshold_percent}%, "
+            f"window={movement_window_frames}f"
+        )
+
+    def update(
+        self,
+        detections: List[Dict],
+        current_frame: int,
+        current_timestamp: str
+    ) -> Dict[str, Any]:
+        """
+        Update parking analytics with current frame detections.
+
+        Args:
+            detections: List of detection dicts with track_id, category, bounding_box
+            current_frame: Current frame number
+            current_timestamp: Current timestamp string
+
+        Returns:
+            Analytics summary dict with active_vehicles, parked_vehicles, and summary stats
+        """
+        current_track_ids = set()
+
+        # Log input summary
+        self.logger.debug(
+            f"[Frame {current_frame}] Parking analytics: "
+            f"{len(detections)} detections, {len(self.active_tracks)} active"
+        )
+
+        # Process each detection
+        for det in detections:
+            track_id = det.get("track_id")
+            if track_id is None:
+                continue
+
+            current_track_ids.add(track_id)
+
+            # Get bbox center and size
+            bbox = det.get("bounding_box", {})
+            center = self._get_bbox_center(bbox)
+            size = self._get_bbox_size(bbox)
+
+            if track_id not in self.active_tracks:
+                # New track - initialize
+                self._initialize_track(
+                    track_id=track_id,
+                    category=det.get("category", "unknown"),
+                    first_frame=current_frame,
+                    first_timestamp=current_timestamp,
+                    center=center,
+                    size=size
+                )
+            else:
+                # Update existing track
+                self._update_track(
+                    track_id=track_id,
+                    category=det.get("category"),
+                    current_frame=current_frame,
+                    current_timestamp=current_timestamp,
+                    center=center,
+                    size=size
+                )
+
+        # Handle removed tracks
+        removed_ids = set(self.active_tracks.keys()) - current_track_ids
+        if removed_ids:
+            self.logger.debug(
+                f"[Frame {current_frame}] Archiving {len(removed_ids)} removed: "
+                f"{list(removed_ids)[:5]}"
+            )
+            for track_id in removed_ids:
+                self._archive_track(track_id)
+
+        # Generate analytics summary
+        analytics = self._generate_analytics_summary(current_frame, current_timestamp)
+
+        # Log summary
+        self.logger.debug(
+            f"[Frame {current_frame}] Summary: "
+            f"active={analytics['summary']['total_active']}, "
+            f"parked={analytics['summary']['total_parked']}, "
+            f"avg_dwell={analytics['summary']['average_dwell_time']:.1f}s"
+        )
+
+        return analytics
+
+    def _initialize_track(self, track_id, category, first_frame, first_timestamp, center, size):
+        """Initialize new vehicle track"""
+        self.active_tracks[track_id] = VehicleParkingState(
+            track_id=track_id,
+            category=category,
+            first_seen_frame=first_frame,
+            first_seen_timestamp=first_timestamp,
+            last_seen_frame=first_frame,
+            last_seen_timestamp=first_timestamp
+        )
+
+        # Add initial position
+        self.active_tracks[track_id].position_history.append(center)
+        self.active_tracks[track_id].bbox_size_history.append(size)
+
+        self.logger.debug(
+            f"Init track {track_id} ({category}): "
+            f"pos=({center[0]:.0f},{center[1]:.0f}), size={size:.0f}"
+        )
+
+    def _update_track(self, track_id, category, current_frame, current_timestamp, center, size):
+        """Update existing track with new detection"""
+        track_state = self.active_tracks[track_id]
+
+        # Update basic info
+        track_state.category = category
+        track_state.last_seen_frame = current_frame
+        track_state.last_seen_timestamp = current_timestamp
+
+        # Update position history
+        track_state.position_history.append(center)
+        track_state.bbox_size_history.append(size)
+
+        # Calculate movement and update parked status
+        self._update_parking_status(track_state, current_frame, current_timestamp)
+
+    def _update_parking_status(self, track_state: VehicleParkingState, current_frame: int, timestamp: str):
+        """Determine if vehicle is parked based on movement"""
+
+        # Need sufficient history
+        if len(track_state.position_history) < self.movement_window_frames:
+            return
+
+        # Calculate movement variance relative to bbox size
+        positions = list(track_state.position_history)
+        sizes = list(track_state.bbox_size_history)
+        avg_size = sum(sizes) / len(sizes)
+
+        # Calculate max displacement in window
+        max_displacement = 0.0
+        for i in range(1, len(positions)):
+            dx = positions[i][0] - positions[i-1][0]
+            dy = positions[i][1] - positions[i-1][1]
+            displacement = math.sqrt(dx*dx + dy*dy)
+            max_displacement = max(max_displacement, displacement)
+
+        # Movement as percentage of average bbox size
+        movement_percent = (max_displacement / avg_size) * 100 if avg_size > 0 else 0
+        track_state.movement_variance = movement_percent
+
+        # Determine parked status
+        is_stationary = movement_percent < self.movement_threshold_percent
+
+        if is_stationary:
+            if not track_state.is_parked:
+                # Check if vehicle has been stationary long enough
+                if track_state.dwell_time_frames >= self.parked_threshold_frames:
+                    track_state.is_parked = True
+                    track_state.parked_since_frame = current_frame
+                    track_state.parked_since_timestamp = timestamp
+
+                    self.logger.info(
+                        f"Track {track_state.track_id} ({track_state.category}) PARKED: "
+                        f"dwell={track_state.dwell_time_frames}f "
+                        f"({track_state.dwell_time_frames/self.fps:.1f}s), "
+                        f"movement={movement_percent:.2f}%"
+                    )
+
+            # Increment parked time if already parked
+            if track_state.is_parked:
+                track_state.total_parked_frames += 1
+        else:
+            # Moving - reset parked status if was parked
+            if track_state.is_parked:
+                self.logger.info(
+                    f"Track {track_state.track_id} MOVING: "
+                    f"movement={movement_percent:.1f}% > {self.movement_threshold_percent}%, "
+                    f"was_parked={track_state.total_parked_frames}f "
+                    f"({track_state.total_parked_frames/self.fps:.1f}s)"
+                )
+                track_state.is_parked = False
+                track_state.parked_since_frame = None
+                track_state.parked_since_timestamp = None
+
+    def _archive_track(self, track_id: int):
+        """Move track from active to removed history"""
+        if track_id in self.active_tracks:
+            track_state = self.active_tracks.pop(track_id)
+            self.removed_tracks.append(track_state)
+
+            self.logger.debug(
+                f"Archive {track_id}: dwell={track_state.dwell_time_frames}f, "
+                f"parked={track_state.total_parked_frames}f, "
+                f"status={'PARKED' if track_state.is_parked else 'MOVING'}"
+            )
+
+    def _generate_analytics_summary(self, current_frame: int, timestamp: str) -> Dict[str, Any]:
+        """Generate summary of parking analytics"""
+
+        active_vehicles = []
+        parked_vehicles = []
+
+        for track_state in self.active_tracks.values():
+            vehicle_data = {
+                "track_id": track_state.track_id,
+                "category": track_state.category,
+                "dwell_time_seconds": round(track_state.dwell_time_frames / self.fps, 1),
+                "dwell_time_frames": track_state.dwell_time_frames,
+                "is_parked": track_state.is_parked,
+                "movement_percent": round(track_state.movement_variance, 2),
+                "first_seen": track_state.first_seen_timestamp,
+                "last_seen": track_state.last_seen_timestamp
+            }
+
+            if track_state.is_parked:
+                vehicle_data["parked_time_seconds"] = round(track_state.parked_time_frames / self.fps, 1)
+                vehicle_data["parked_since"] = track_state.parked_since_timestamp
+                parked_vehicles.append(vehicle_data)
+
+            active_vehicles.append(vehicle_data)
+
+        return {
+            "active_vehicles": active_vehicles,
+            "parked_vehicles": parked_vehicles,
+            "summary": {
+                "total_active": len(active_vehicles),
+                "total_parked": len(parked_vehicles),
+                "average_dwell_time": self._calculate_average_dwell_time(),
+                "longest_parked": self._get_longest_parked()
+            },
+            "timestamp": timestamp,
+            "frame": current_frame
+        }
+
+    def _calculate_average_dwell_time(self) -> float:
+        """Calculate average dwell time of active vehicles"""
+        if not self.active_tracks:
+            return 0.0
+        total_frames = sum(t.dwell_time_frames for t in self.active_tracks.values())
+        return round((total_frames / len(self.active_tracks)) / self.fps, 1)
+
+    def _get_longest_parked(self) -> Optional[Dict]:
+        """Get vehicle with longest parking duration"""
+        parked = [t for t in self.active_tracks.values() if t.is_parked]
+        if not parked:
+            return None
+
+        longest = max(parked, key=lambda t: t.total_parked_frames)
+        return {
+            "track_id": longest.track_id,
+            "category": longest.category,
+            "parked_time_seconds": round(longest.total_parked_frames / self.fps, 1),
+            "parked_since": longest.parked_since_timestamp
+        }
+
+    @staticmethod
+    def _get_bbox_center(bbox: Dict) -> Tuple[float, float]:
+        """Extract center point from bounding box"""
+        if "xmin" in bbox and "xmax" in bbox:
+            return ((bbox["xmin"] + bbox["xmax"]) / 2, (bbox["ymin"] + bbox["ymax"]) / 2)
+        elif "x1" in bbox and "x2" in bbox:
+            return ((bbox["x1"] + bbox["x2"]) / 2, (bbox["y1"] + bbox["y2"]) / 2)
+        return (0.0, 0.0)
+
+    @staticmethod
+    def _get_bbox_size(bbox: Dict) -> float:
+        """Calculate bbox diagonal size (for relative movement calculation)"""
+        if "xmin" in bbox and "xmax" in bbox:
+            w = bbox["xmax"] - bbox["xmin"]
+            h = bbox["ymax"] - bbox["ymin"]
+        elif "x1" in bbox and "x2" in bbox:
+            w = bbox["x2"] - bbox["x1"]
+            h = bbox["y2"] - bbox["y1"]
+        else:
+            return 0.0
+        return math.sqrt(w*w + h*h)
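The new tracker marks a vehicle as parked once its maximum displacement over the sliding window stays below `movement_threshold_percent` of the bbox diagonal and it has been tracked for at least `parked_threshold_frames`. A usage sketch follows; the import path mirrors the file location in this release, and the detection values and frame/timestamp inputs are illustrative:

```python
# Hedged usage sketch for the new ParkingAnalyticsTracker.
from matrice_analytics.post_processing.utils.parking_analytics_tracker import ParkingAnalyticsTracker

tracker = ParkingAnalyticsTracker(
    parked_threshold_frames=150,     # ~5 s at 30 fps before a stationary vehicle counts as parked
    movement_threshold_percent=5.0,  # max displacement as % of bbox diagonal to be "stationary"
    movement_window_frames=60,       # sliding window of positions inspected per update
    fps=30.0,
)

# Detection dict keys (track_id, category, bounding_box with xmin/ymin/xmax/ymax)
# follow the helpers in the file above.
detections = [
    {"track_id": 7, "category": "car",
     "bounding_box": {"xmin": 100, "ymin": 200, "xmax": 260, "ymax": 320}},
]
analytics = tracker.update(detections, current_frame=1, current_timestamp="2024-01-01T12:00:00+00:00")
print(analytics["summary"])  # e.g. {'total_active': 1, 'total_parked': 0, ...}
```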