matrice-analytics 0.1.89__py3-none-any.whl → 0.1.97__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- matrice_analytics/post_processing/__init__.py +21 -2
- matrice_analytics/post_processing/config.py +6 -0
- matrice_analytics/post_processing/core/config.py +102 -3
- matrice_analytics/post_processing/face_reg/face_recognition.py +146 -14
- matrice_analytics/post_processing/face_reg/face_recognition_client.py +116 -4
- matrice_analytics/post_processing/face_reg/people_activity_logging.py +19 -0
- matrice_analytics/post_processing/post_processor.py +12 -0
- matrice_analytics/post_processing/usecases/__init__.py +9 -0
- matrice_analytics/post_processing/usecases/advanced_customer_service.py +5 -2
- matrice_analytics/post_processing/usecases/color_detection.py +1 -0
- matrice_analytics/post_processing/usecases/fire_detection.py +94 -14
- matrice_analytics/post_processing/usecases/footfall.py +750 -0
- matrice_analytics/post_processing/usecases/license_plate_monitoring.py +91 -1
- matrice_analytics/post_processing/usecases/people_counting.py +55 -22
- matrice_analytics/post_processing/usecases/vehicle_monitoring.py +15 -32
- matrice_analytics/post_processing/usecases/vehicle_monitoring_drone_view.py +1007 -0
- matrice_analytics/post_processing/usecases/vehicle_monitoring_parking_lot.py +1011 -0
- matrice_analytics/post_processing/usecases/weapon_detection.py +2 -1
- matrice_analytics/post_processing/utils/alert_instance_utils.py +94 -26
- matrice_analytics/post_processing/utils/business_metrics_manager_utils.py +97 -4
- matrice_analytics/post_processing/utils/incident_manager_utils.py +103 -6
- {matrice_analytics-0.1.89.dist-info → matrice_analytics-0.1.97.dist-info}/METADATA +1 -1
- {matrice_analytics-0.1.89.dist-info → matrice_analytics-0.1.97.dist-info}/RECORD +26 -23
- {matrice_analytics-0.1.89.dist-info → matrice_analytics-0.1.97.dist-info}/WHEEL +0 -0
- {matrice_analytics-0.1.89.dist-info → matrice_analytics-0.1.97.dist-info}/licenses/LICENSE.txt +0 -0
- {matrice_analytics-0.1.89.dist-info → matrice_analytics-0.1.97.dist-info}/top_level.txt +0 -0
matrice_analytics/post_processing/usecases/weapon_detection.py

@@ -317,7 +317,7 @@ class WeaponDetectionUseCase(BaseProcessor):
         total_counts = [{"category": cat, "count": count} for cat, count in total_counts_dict.items() if count > 0]

         # Build current_counts
-        current_counts = [{"category":
+        current_counts = [{"category": 'Weapon', "count": count} for cat, count in per_category_count.items() if count > 0]

         # Prepare detections
         detections = []

@@ -377,6 +377,7 @@ class WeaponDetectionUseCase(BaseProcessor):
                 start_time=high_precision_start_timestamp,
                 reset_time=high_precision_reset_timestamp
             )
+            tracking_stat['target_categories'] = ['Weapon']
             tracking_stats.append(tracking_stat)
         return tracking_stats

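The rewritten `current_counts` line reports every non-zero per-category count under the single 'Weapon' label, and each tracking stat now carries a `target_categories` list. A minimal sketch of that aggregation, assuming a `per_category_count` dict and a plain-dict `tracking_stat` (the surrounding use-case fields are omitted and the example categories are illustrative only):

```python
# Assumed per-category detections; the real use case builds this from model output.
per_category_count = {"pistol": 2, "rifle": 0, "knife": 1}

# Mirror of the rewritten list comprehension: every non-zero category is
# reported under the fixed 'Weapon' label.
current_counts = [
    {"category": "Weapon", "count": count}
    for cat, count in per_category_count.items()
    if count > 0
]

# Each tracking stat is now tagged with the categories the alert targets.
tracking_stat = {"counts": current_counts}
tracking_stat["target_categories"] = ["Weapon"]

print(current_counts)                      # [{'category': 'Weapon', 'count': 2}, {'category': 'Weapon', 'count': 1}]
print(tracking_stat["target_categories"])  # ['Weapon']
```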
matrice_analytics/post_processing/utils/alert_instance_utils.py

@@ -76,7 +76,8 @@ class ALERT_INSTANCE:
         config_topic: str = "alert_instant_config_request",
         trigger_topic: str = "alert_instant_triggered",
         polling_interval: int = 10,
-        logger: Optional[logging.Logger] = None
+        logger: Optional[logging.Logger] = None,
+        app_deployment_id: Optional[str] = None
     ):
         """
         Initialize ALERT_INSTANCE.
@@ -88,6 +89,7 @@ class ALERT_INSTANCE:
             trigger_topic: Topic/stream name for publishing triggers
             polling_interval: Seconds between config polling
             logger: Python logger instance
+            app_deployment_id: App deployment ID to filter incoming alerts (only process alerts matching this ID)
         """
         self.redis_client = redis_client
         self.kafka_client = kafka_client
@@ -95,6 +97,7 @@ class ALERT_INSTANCE:
         self.trigger_topic = trigger_topic
         self.polling_interval = polling_interval
         self.logger = logger or logging.getLogger(__name__)
+        self.app_deployment_id = app_deployment_id

         # In-memory alert storage: {instant_alert_id: AlertConfig}
         self._alerts: Dict[str, AlertConfig] = {}
@@ -114,7 +117,7 @@ class ALERT_INSTANCE:
         self.logger.info(
             f"Initialized ALERT_INSTANCE with config_topic={config_topic}, "
             f"trigger_topic={trigger_topic}, polling_interval={polling_interval}s, "
-            f"cooldown={self._cooldown_seconds}s"
+            f"cooldown={self._cooldown_seconds}s, app_deployment_id={app_deployment_id}"
         )

     def start(self):
@@ -394,6 +397,29 @@ class ALERT_INSTANCE:
             self.logger.warning("[ALERT_DEBUG] Skipping malformed config with 'raw' key only")
             return

+        # Log detection_service field (informational only, no filtering)
+        detection_service = config_data.get('detection_service', '')
+        self.logger.info(f"[ALERT_DEBUG] detection_service: '{detection_service}'")
+
+        # Filter by app_deployment_id - only process alerts that match our app_deployment_id
+        incoming_app_deployment_id = config_data.get('app_deployment_id', '')
+        if self.app_deployment_id:
+            if incoming_app_deployment_id != self.app_deployment_id:
+                self.logger.info(
+                    f"[ALERT_DEBUG] Skipping alert - app_deployment_id mismatch: "
+                    f"incoming='{incoming_app_deployment_id}', ours='{self.app_deployment_id}'"
+                )
+                return
+            else:
+                self.logger.info(
+                    f"[ALERT_DEBUG] ✓ app_deployment_id match: '{incoming_app_deployment_id}'"
+                )
+        else:
+            self.logger.warning(
+                f"[ALERT_DEBUG] No app_deployment_id filter set, processing all alerts. "
+                f"Incoming app_deployment_id: '{incoming_app_deployment_id}'"
+            )
+
         # Log individual fields before creating AlertConfig
         self.logger.debug(f"[ALERT_DEBUG] Extracted fields from config_data:")
         self.logger.debug(f"[ALERT_DEBUG] - instant_alert_id: '{config_data.get('instant_alert_id', 'MISSING')}'")
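The new block drops alert configs whose `app_deployment_id` does not match the one the instance was constructed with; with no filter configured, every config is still processed. A standalone sketch of that decision, reusing the same field names as the diff (the real logic lives inside `ALERT_INSTANCE` and logs each branch; the IDs below are hypothetical):

```python
from typing import Any, Dict, Optional

def should_process_config(config_data: Dict[str, Any],
                          own_app_deployment_id: Optional[str]) -> bool:
    """Mirror of the new filter: accept only configs whose app_deployment_id
    matches ours; with no filter set, accept everything (previous behaviour)."""
    incoming = config_data.get("app_deployment_id", "")
    if own_app_deployment_id:
        return incoming == own_app_deployment_id
    return True

# Hypothetical configs for illustration only.
cfg_match = {"instant_alert_id": "a1", "app_deployment_id": "dep-123"}
cfg_other = {"instant_alert_id": "a2", "app_deployment_id": "dep-999"}

print(should_process_config(cfg_match, "dep-123"))  # True
print(should_process_config(cfg_other, "dep-123"))  # False (mismatch, skipped)
print(should_process_config(cfg_other, None))       # True (no filter configured)
```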
@@ -706,7 +732,13 @@ class ALERT_INSTANCE:
         detection: Dict[str, Any],
         config: Dict[str, Any]
     ) -> bool:
-        """
+        """
+        Evaluate license plate detection against alert criteria.
+
+        Supports two alert conditions:
+        - "in_list" (BLACKLIST): Alert ONLY when detected plate IS in targetPlates list
+        - "not_in_list" (WHITELIST): Alert when detected plate is NOT in targetPlates list
+        """
         self.logger.debug(f"[ALERT_DEBUG] ========== EVALUATING LPR ALERT ==========")
         self.logger.debug(f"[ALERT_DEBUG] Alert ID: {alert.instant_alert_id}")
         self.logger.debug(f"[ALERT_DEBUG] Alert Name: {alert.alert_name}")
@@ -715,9 +747,12 @@ class ALERT_INSTANCE:

         target_plates = config.get("targetPlates", [])
         min_confidence = config.get("minConfidence", 0.0)
+        # Get alertCondition: "in_list" (blacklist) or "not_in_list" (whitelist)
+        alert_condition = config.get("alertCondition", "in_list")

         self.logger.debug(f"[ALERT_DEBUG] Target plates: {target_plates}")
         self.logger.debug(f"[ALERT_DEBUG] Min confidence: {min_confidence}")
+        self.logger.info(f"[ALERT_DEBUG] Alert condition: '{alert_condition}' (in_list=blacklist, not_in_list=whitelist)")

         plate_number = detection.get("plateNumber", "").upper().strip()
         confidence = detection.get("confidence", 0.0)
@@ -725,37 +760,68 @@ class ALERT_INSTANCE:
         self.logger.debug(f"[ALERT_DEBUG] Detected plate (normalized): '{plate_number}'")
         self.logger.debug(f"[ALERT_DEBUG] Detection confidence: {confidence}")

+        # Skip empty plate numbers
+        if not plate_number:
+            self.logger.debug(f"[ALERT_DEBUG] ✗ Empty plate number, skipping")
+            return False
+
         # Check if plate matches target list (case-insensitive)
-
-
-            for target in target_plates
-        )
+        normalized_targets = [str(t).upper().strip() for t in target_plates]
+        plate_in_list = plate_number in normalized_targets

-        self.logger.debug(f"[ALERT_DEBUG]
-
-        normalized_targets = [str(t).upper().strip() for t in target_plates]
-        self.logger.debug(f"[ALERT_DEBUG] Normalized target plates: {normalized_targets}")
-        self.logger.debug(f"[ALERT_DEBUG] Plate '{plate_number}' not in {normalized_targets}")
+        self.logger.debug(f"[ALERT_DEBUG] Normalized target plates: {normalized_targets}")
+        self.logger.debug(f"[ALERT_DEBUG] Plate '{plate_number}' in list: {plate_in_list}")

-        # Check confidence threshold
-        min_confidence=0.05
+        # Check confidence threshold (minimum 0.05)
+        min_confidence = max(0.05, min_confidence)
         confidence_match = confidence >= min_confidence

         self.logger.debug(f"[ALERT_DEBUG] Confidence match result: {confidence_match} ({confidence} >= {min_confidence})")

-        if
-
-
-
-
-
+        # Determine if alert should trigger based on alertCondition
+        should_trigger = False
+
+        if alert_condition == "in_list":
+            # BLACKLIST: Alert only when plate IS in the target list
+            if plate_in_list and confidence_match:
+                should_trigger = True
+                self.logger.info(
+                    f"[ALERT_DEBUG] ✓ LPR BLACKLIST ALERT TRIGGERED: {alert.alert_name} - "
+                    f"Plate: {plate_number} IS in blacklist, Confidence: {confidence:.2f}"
+                )
+            else:
+                self.logger.debug(
+                    f"[ALERT_DEBUG] ✗ LPR blacklist alert NOT triggered: {alert.alert_name} - "
+                    f"Plate '{plate_number}' in_list={plate_in_list}, confidence_match={confidence_match}"
+                )
+
+        elif alert_condition == "not_in_list":
+            # WHITELIST: Alert when plate is NOT in the target list
+            if not plate_in_list and confidence_match:
+                should_trigger = True
+                self.logger.info(
+                    f"[ALERT_DEBUG] ✓ LPR WHITELIST ALERT TRIGGERED: {alert.alert_name} - "
+                    f"Plate: {plate_number} is NOT in whitelist, Confidence: {confidence:.2f}"
+                )
+            else:
+                self.logger.debug(
+                    f"[ALERT_DEBUG] ✗ LPR whitelist alert NOT triggered: {alert.alert_name} - "
+                    f"Plate '{plate_number}' in_list={plate_in_list} (whitelisted), confidence_match={confidence_match}"
+                )
+
         else:
-
-
-            f"
+            # Unknown condition, default to blacklist behavior for backward compatibility
+            self.logger.warning(
+                f"[ALERT_DEBUG] Unknown alertCondition '{alert_condition}', defaulting to 'in_list' (blacklist) behavior"
             )
+            if plate_in_list and confidence_match:
+                should_trigger = True
+                self.logger.info(
+                    f"[ALERT_DEBUG] ✓ LPR ALERT TRIGGERED (default): {alert.alert_name} - "
+                    f"Plate: {plate_number}, Confidence: {confidence:.2f}"
+                )

-        return
+        return should_trigger

     def _evaluate_count_alert(
         self,
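The rebuilt LPR evaluation reduces to: normalize the detected plate and the target list, check membership, clamp the confidence threshold to at least 0.05, then trigger on "in_list" (blacklist) or "not_in_list" (whitelist). A self-contained sketch of that decision using the same config keys (`targetPlates`, `minConfidence`, `alertCondition`) as the diff; it omits the logging and the `alert` object:

```python
from typing import Any, Dict

def evaluate_lpr_alert(detection: Dict[str, Any], config: Dict[str, Any]) -> bool:
    """Sketch of the new blacklist/whitelist evaluation for license plates."""
    plate = detection.get("plateNumber", "").upper().strip()
    if not plate:
        return False  # empty plate numbers are skipped outright

    targets = [str(t).upper().strip() for t in config.get("targetPlates", [])]
    plate_in_list = plate in targets

    # Confidence threshold is clamped to a minimum of 0.05.
    min_confidence = max(0.05, config.get("minConfidence", 0.0))
    confidence_match = detection.get("confidence", 0.0) >= min_confidence

    condition = config.get("alertCondition", "in_list")
    if condition == "not_in_list":          # whitelist: alert on unknown plates
        return (not plate_in_list) and confidence_match
    # "in_list" and unknown conditions both fall back to blacklist behaviour.
    return plate_in_list and confidence_match

blacklist_cfg = {"targetPlates": ["abc123"], "alertCondition": "in_list", "minConfidence": 0.3}
whitelist_cfg = {"targetPlates": ["abc123"], "alertCondition": "not_in_list", "minConfidence": 0.3}
detection = {"plateNumber": " abc123 ", "confidence": 0.9}

print(evaluate_lpr_alert(detection, blacklist_cfg))  # True  (plate is blacklisted)
print(evaluate_lpr_alert(detection, whitelist_cfg))  # False (plate is whitelisted)
```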
@@ -868,10 +934,11 @@ class ALERT_INSTANCE:
             "detectionType": detection_type_raw,
             "confidence": detection.get("confidence", 0.0),
             "coordinates": detection.get("coordinates", {}),
-            "cameraName": detection.get("cameraName", "")
+            "cameraName": detection.get("cameraName", ""),
+            "locationName": detection.get("locationName", "")
         }

-        # Add type-specific
+        # Add type-specific fields
         if detection_type_raw == "license_plate":
             context_data.update({
                 "plateNumber": detection.get("plateNumber", ""),
@@ -900,6 +967,7 @@ class ALERT_INSTANCE:
         trigger_message = {
             "instant_alert_id": alert.instant_alert_id,
             "camera_id": alert.camera_id,
+            "frame_id": detection.get("frame_id", ""),
             "triggered_at": datetime.now(timezone.utc).isoformat(),
             "context_data": context_data
         }
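Trigger messages now carry the frame that produced the detection. A minimal sketch of the enriched payload, assuming a detection dict that already holds `frame_id`; the alert fields and the Kafka publish step are placeholders here, not the real objects:

```python
from datetime import datetime, timezone

detection = {"frame_id": "frame-0042", "plateNumber": "ABC123", "confidence": 0.91}
context_data = {"plateNumber": detection["plateNumber"], "confidence": detection["confidence"]}

trigger_message = {
    "instant_alert_id": "alert-1",              # would come from the matched AlertConfig
    "camera_id": "cam-7",                       # likewise from the alert
    "frame_id": detection.get("frame_id", ""),  # new in this release
    "triggered_at": datetime.now(timezone.utc).isoformat(),
    "context_data": context_data,
}
print(trigger_message["frame_id"])  # frame-0042
```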
matrice_analytics/post_processing/utils/business_metrics_manager_utils.py

@@ -28,6 +28,9 @@ DEFAULT_AGGREGATION_INTERVAL = 300
 # Supported aggregation types
 AGGREGATION_TYPES = ["mean", "min", "max", "sum"]

+# Cache for location names to avoid repeated API calls
+_location_name_cache: Dict[str, str] = {}
+
 # Default metrics configuration with aggregation type
 DEFAULT_METRICS_CONFIG = {
     "customer_to_staff_ratio": "mean",
@@ -85,6 +88,8 @@ class CameraMetricsState:
     camera_name: str = ""
     app_deployment_id: str = ""
     application_id: str = ""
+    location_id: str = ""
+    location_name: str = ""
     metrics: Dict[str, MetricAggregator] = field(default_factory=dict)
     last_push_time: float = field(default_factory=time.time)

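`CameraMetricsState` gains `location_id` and `location_name` defaults alongside the existing camera and deployment identifiers. A stripped-down sketch of the dataclass with the new fields; the real class also tracks `metrics` aggregators, which are omitted here:

```python
import time
from dataclasses import dataclass, field

@dataclass
class CameraMetricsStateSketch:
    """Simplified stand-in for CameraMetricsState showing the new location fields."""
    camera_id: str = ""
    camera_name: str = ""
    app_deployment_id: str = ""
    application_id: str = ""
    location_id: str = ""      # new: raw location reference from camera_info
    location_name: str = ""    # new: human-readable name resolved via the location API
    last_push_time: float = field(default_factory=time.time)

state = CameraMetricsStateSketch(camera_id="cam-7", location_id="loc-1", location_name="Entry Reception")
print(state.location_name)  # Entry Reception
```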
@@ -311,13 +316,14 @@ class BUSINESS_METRICS_MANAGER:
             stream_info: Stream metadata from usecase

         Returns:
-            Dict with camera_id, camera_name, app_deployment_id, application_id
+            Dict with camera_id, camera_name, app_deployment_id, application_id, location_id
         """
         result = {
             "camera_id": "",
             "camera_name": "",
             "app_deployment_id": "",
-            "application_id": ""
+            "application_id": "",
+            "location_id": ""
         }

         if not stream_info:
@@ -393,6 +399,16 @@ class BUSINESS_METRICS_MANAGER:
                 ""
             )

+            # location_id - from camera_info.location
+            result["location_id"] = (
+                camera_info.get("location", "") or
+                camera_info.get("location_id", "") or
+                camera_info.get("locationId", "") or
+                input_camera_info.get("location", "") or
+                input_camera_info.get("location_id", "") or
+                ""
+            )
+
             self.logger.debug(f"[BUSINESS_METRICS_MANAGER] Extracted camera info: {result}")

         except Exception as e:
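`location_id` is resolved through a chain of `or` fallbacks across several possible key spellings, with an empty string as the last resort. A small sketch of that lookup over assumed `camera_info` / `input_camera_info` dicts (the sample ID matches the docstring example elsewhere in this diff):

```python
camera_info = {"locationId": "6908756db129880c34f2e09a"}  # only the camelCase key is present here
input_camera_info = {}

# First non-empty value wins; an empty string is the final fallback.
location_id = (
    camera_info.get("location", "") or
    camera_info.get("location_id", "") or
    camera_info.get("locationId", "") or
    input_camera_info.get("location", "") or
    input_camera_info.get("location_id", "") or
    ""
)
print(location_id)  # 6908756db129880c34f2e09a
```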
@@ -400,6 +416,66 @@ class BUSINESS_METRICS_MANAGER:

         return result

+    def _fetch_location_name(self, location_id: str) -> str:
+        """
+        Fetch location name from API using location_id.
+
+        Args:
+            location_id: The location ID to look up
+
+        Returns:
+            Location name string, or 'Entry Reception' as default if API fails
+        """
+        global _location_name_cache
+        default_location = "Entry Reception"
+
+        if not location_id:
+            self.logger.debug(f"[BUSINESS_METRICS_MANAGER] No location_id provided, using default: '{default_location}'")
+            return default_location
+
+        # Check cache first
+        if location_id in _location_name_cache:
+            cached_name = _location_name_cache[location_id]
+            self.logger.debug(f"[BUSINESS_METRICS_MANAGER] Using cached location name for '{location_id}': '{cached_name}'")
+            return cached_name
+
+        # Need factory reference with session to make API call
+        if not self._factory_ref or not self._factory_ref._session:
+            self.logger.warning(f"[BUSINESS_METRICS_MANAGER] No session available for location API, using default: '{default_location}'")
+            return default_location
+
+        try:
+            endpoint = f"/v1/inference/get_location/{location_id}"
+            self.logger.info(f"[BUSINESS_METRICS_MANAGER] Fetching location name from API: {endpoint}")
+
+            response = self._factory_ref._session.rpc.get(endpoint)
+
+            if response and isinstance(response, dict):
+                success = response.get("success", False)
+                if success:
+                    data = response.get("data", {})
+                    location_name = data.get("locationName", default_location)
+                    self.logger.info(f"[BUSINESS_METRICS_MANAGER] ✓ Fetched location name: '{location_name}' for location_id: '{location_id}'")
+
+                    # Cache the result
+                    _location_name_cache[location_id] = location_name
+                    return location_name
+                else:
+                    self.logger.warning(
+                        f"[BUSINESS_METRICS_MANAGER] API returned success=false for location_id '{location_id}': "
+                        f"{response.get('message', 'Unknown error')}"
+                    )
+            else:
+                self.logger.warning(f"[BUSINESS_METRICS_MANAGER] Invalid response format from API: {response}")
+
+        except Exception as e:
+            self.logger.error(f"[BUSINESS_METRICS_MANAGER] Error fetching location name for '{location_id}': {e}", exc_info=True)
+
+        # Use default on any failure
+        self.logger.info(f"[BUSINESS_METRICS_MANAGER] Using default location name: '{default_location}'")
+        _location_name_cache[location_id] = default_location
+        return default_location
+
     def process_metrics(
         self,
         camera_id: str,
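Both managers now resolve a location name once per `location_id` via `/v1/inference/get_location/{location_id}` and memoize the answer in a module-level dict, falling back to "Entry Reception" when the ID is missing, no session is available, or the call fails. A standalone sketch of that cache-and-fallback behaviour with the HTTP call replaced by an injected callable (the real code goes through the factory's session RPC client, not this stub):

```python
from typing import Callable, Dict, Optional

_location_name_cache: Dict[str, str] = {}
DEFAULT_LOCATION = "Entry Reception"

def fetch_location_name(location_id: str,
                        api_get: Optional[Callable[[str], dict]] = None) -> str:
    """Sketch of the new lookup: cache first, then API, then the default name."""
    if not location_id:
        return DEFAULT_LOCATION
    if location_id in _location_name_cache:
        return _location_name_cache[location_id]
    try:
        if api_get is None:
            raise RuntimeError("no session available")
        response = api_get(f"/v1/inference/get_location/{location_id}")
        if isinstance(response, dict) and response.get("success"):
            name = response.get("data", {}).get("locationName", DEFAULT_LOCATION)
            _location_name_cache[location_id] = name
            return name
    except Exception:
        pass
    # Any failure caches and returns the default so we do not retry every frame.
    _location_name_cache[location_id] = DEFAULT_LOCATION
    return DEFAULT_LOCATION

# Fake transport standing in for session.rpc.get (illustrative only).
fake_get = lambda endpoint: {"success": True, "data": {"locationName": "Warehouse A"}}
print(fetch_location_name("loc-1", fake_get))  # Warehouse A (fetched once)
print(fetch_location_name("loc-1"))            # Warehouse A (served from cache)
print(fetch_location_name(""))                 # Entry Reception (default)
```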
@@ -452,11 +528,16 @@ class BUSINESS_METRICS_MANAGER:
         final_camera_name = camera_info.get("camera_name") or ""
         final_app_deployment_id = camera_info.get("app_deployment_id") or factory_app_deployment_id or ""
         final_application_id = camera_info.get("application_id") or factory_application_id or ""
+        final_location_id = camera_info.get("location_id") or ""
+
+        # Fetch location_name from API using location_id
+        final_location_name = self._fetch_location_name(final_location_id)

         self.logger.info(
             f"[BUSINESS_METRICS_MANAGER] Final values - camera_id={final_camera_id}, "
             f"camera_name={final_camera_name}, app_deployment_id={final_app_deployment_id}, "
-            f"application_id={final_application_id}"
+            f"application_id={final_application_id}, location_id={final_location_id}, "
+            f"location_name={final_location_name}"
         )

         with self._states_lock:
@@ -466,7 +547,9 @@ class BUSINESS_METRICS_MANAGER:
                     camera_id=final_camera_id,
                     camera_name=final_camera_name,
                     app_deployment_id=final_app_deployment_id,
-                    application_id=final_application_id
+                    application_id=final_application_id,
+                    location_id=final_location_id,
+                    location_name=final_location_name
                 )
                 self.logger.info(
                     f"[BUSINESS_METRICS_MANAGER] ✓ Created new state for camera: {final_camera_id}"
@@ -483,6 +566,12 @@ class BUSINESS_METRICS_MANAGER:
                     self.logger.debug(f"[BUSINESS_METRICS_MANAGER] Updated app_deployment_id to: {final_app_deployment_id}")
                 if final_application_id and not state.application_id:
                     state.application_id = final_application_id
+                if final_location_id and not state.location_id:
+                    state.location_id = final_location_id
+                    self.logger.debug(f"[BUSINESS_METRICS_MANAGER] Updated location_id to: {final_location_id}")
+                if final_location_name and not state.location_name:
+                    state.location_name = final_location_name
+                    self.logger.debug(f"[BUSINESS_METRICS_MANAGER] Updated location_name to: {final_location_name}")

             # Add each metric value to aggregator
             metrics_added = 0
@@ -589,6 +678,7 @@ class BUSINESS_METRICS_MANAGER:
             "camera_name": state.camera_name,
             "app_deployment_id": state.app_deployment_id,
             "application_id": state.application_id,
+            "location_name": state.location_name,
             "business_metrics": aggregated_metrics,
             "timestamp": datetime.now(timezone.utc).isoformat(),
             "aggregation_interval_seconds": self.aggregation_interval
@@ -695,6 +785,8 @@ class BUSINESS_METRICS_MANAGER:
             "camera_name": state.camera_name,
             "app_deployment_id": state.app_deployment_id,
             "application_id": state.application_id,
+            "location_id": state.location_id,
+            "location_name": state.location_name,
             "metrics_count": {
                 name: len(agg.values)
                 for name, agg in state.metrics.items()
@@ -711,6 +803,7 @@ class BUSINESS_METRICS_MANAGER:
             cam_id: {
                 "camera_id": state.camera_id,
                 "camera_name": state.camera_name,
+                "location_name": state.location_name,
                 "metrics_count": {
                     name: len(agg.values)
                     for name, agg in state.metrics.items()
matrice_analytics/post_processing/utils/incident_manager_utils.py

@@ -38,6 +38,9 @@ DEFAULT_THRESHOLDS = [
     {"level": "critical", "percentage": 30}
 ]

+# Cache for location names to avoid repeated API calls
+_location_name_cache: Dict[str, str] = {}
+

 @dataclass
 class IncidentState:
@@ -453,20 +456,22 @@ class INCIDENT_MANAGER:
                 'camera_group': 'staging-customer-1',
                 'location': '6908756db129880c34f2e09a'
             },
-            'frame_id': '
+            'frame_id': '7b94e2f668fb456f95b73c3084e17f8a'
         }

         Args:
             stream_info: Stream metadata from usecase

         Returns:
-            Dict with camera_id, camera_name, app_deployment_id, application_id
+            Dict with camera_id, camera_name, app_deployment_id, application_id, frame_id, location_id
         """
         result = {
             "camera_id": "",
             "camera_name": "",
             "app_deployment_id": "",
-            "application_id": ""
+            "application_id": "",
+            "frame_id": "",
+            "location_id": ""
         }

         if not stream_info:
@@ -563,10 +568,30 @@ class INCIDENT_MANAGER:
                 ""
             )

+            # frame_id - at top level of stream_info
+            result["frame_id"] = (
+                stream_info.get("frame_id", "") or
+                stream_info.get("frameId", "") or
+                input_settings.get("frame_id", "") or
+                input_settings.get("frameId", "") or
+                ""
+            )
+
+            # location_id - from camera_info.location
+            result["location_id"] = (
+                camera_info.get("location", "") or
+                camera_info.get("location_id", "") or
+                camera_info.get("locationId", "") or
+                input_camera_info.get("location", "") or
+                input_camera_info.get("location_id", "") or
+                ""
+            )
+
             self.logger.debug(
                 f"[INCIDENT_MANAGER] Extracted from stream_info - "
                 f"camera_id={result['camera_id']}, camera_name={result['camera_name']}, "
-                f"app_deployment_id={result['app_deployment_id']}, application_id={result['application_id']}"
+                f"app_deployment_id={result['app_deployment_id']}, application_id={result['application_id']}, "
+                f"frame_id={result['frame_id']}, location_id={result['location_id']}"
             )

         except Exception as e:
@@ -592,6 +617,66 @@ class INCIDENT_MANAGER:
             return "high"
         return level

+    def _fetch_location_name(self, location_id: str) -> str:
+        """
+        Fetch location name from API using location_id.
+
+        Args:
+            location_id: The location ID to look up
+
+        Returns:
+            Location name string, or 'Entry Reception' as default if API fails
+        """
+        global _location_name_cache
+        default_location = "Entry Reception"
+
+        if not location_id:
+            self.logger.debug(f"[INCIDENT_MANAGER] No location_id provided, using default: '{default_location}'")
+            return default_location
+
+        # Check cache first
+        if location_id in _location_name_cache:
+            cached_name = _location_name_cache[location_id]
+            self.logger.debug(f"[INCIDENT_MANAGER] Using cached location name for '{location_id}': '{cached_name}'")
+            return cached_name
+
+        # Need factory reference with session to make API call
+        if not self._factory_ref or not self._factory_ref._session:
+            self.logger.warning(f"[INCIDENT_MANAGER] No session available for location API, using default: '{default_location}'")
+            return default_location
+
+        try:
+            endpoint = f"/v1/inference/get_location/{location_id}"
+            self.logger.info(f"[INCIDENT_MANAGER] Fetching location name from API: {endpoint}")
+
+            response = self._factory_ref._session.rpc.get(endpoint)
+
+            if response and isinstance(response, dict):
+                success = response.get("success", False)
+                if success:
+                    data = response.get("data", {})
+                    location_name = data.get("locationName", default_location)
+                    self.logger.info(f"[INCIDENT_MANAGER] ✓ Fetched location name: '{location_name}' for location_id: '{location_id}'")
+
+                    # Cache the result
+                    _location_name_cache[location_id] = location_name
+                    return location_name
+                else:
+                    self.logger.warning(
+                        f"[INCIDENT_MANAGER] API returned success=false for location_id '{location_id}': "
+                        f"{response.get('message', 'Unknown error')}"
+                    )
+            else:
+                self.logger.warning(f"[INCIDENT_MANAGER] Invalid response format from API: {response}")
+
+        except Exception as e:
+            self.logger.error(f"[INCIDENT_MANAGER] Error fetching location name for '{location_id}': {e}", exc_info=True)
+
+        # Use default on any failure
+        self.logger.info(f"[INCIDENT_MANAGER] Using default location name: '{default_location}'")
+        _location_name_cache[location_id] = default_location
+        return default_location
+
     def _generate_incident_id(self, camera_id: str, cycle_id: int) -> str:
         """Generate a unique incident_id for a camera's cycle."""
         return f"incident_{camera_id}_{cycle_id}"
@@ -1005,6 +1090,8 @@ class INCIDENT_MANAGER:
             "app_deployment_id": "...",
             "application_id": "...",
             "camera_name": "...",
+            "frame_id": "...",
+            "location_name": "...",
             "incidents": [{
                 "incident_id": "...",
                 "incident_type": "...",
@@ -1061,10 +1148,18 @@ class INCIDENT_MANAGER:
             ""
         )

+        # Extract frame_id from stream_info
+        final_frame_id = stream_camera_info.get("frame_id", "")
+
+        # Fetch location_name from API using location_id
+        location_id = stream_camera_info.get("location_id", "")
+        final_location_name = self._fetch_location_name(location_id)
+
         self.logger.info(
             f"[INCIDENT_MANAGER] Building message with - "
             f"camera_id={final_camera_id}, camera_name={final_camera_name}, "
-            f"app_deployment_id={final_app_deployment_id}, application_id={final_application_id}"
+            f"app_deployment_id={final_app_deployment_id}, application_id={final_application_id}, "
+            f"frame_id={final_frame_id}, location_name={final_location_name}"
         )

         # Build incident - ONLY include required fields
@@ -1081,12 +1176,14 @@ class INCIDENT_MANAGER:
             "human_text": incident_data.get("human_text", "")
         }

-        # Build final message
+        # Build final message with all required fields
         message = {
             "camera_id": final_camera_id,
             "app_deployment_id": final_app_deployment_id,
             "application_id": final_application_id,
             "camera_name": final_camera_name,
+            "frame_id": final_frame_id,
+            "location_name": final_location_name,
             "incidents": [incident]
         }

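The final incident message now carries `frame_id` and `location_name` alongside the existing camera and deployment identifiers. A sketch of the enriched payload as it would be serialized; the field values below are placeholders (the frame_id reuses the docstring example from this diff, and the incident_id follows the `incident_{camera_id}_{cycle_id}` pattern of `_generate_incident_id`):

```python
import json

incident = {
    "incident_id": "incident_cam-7_1",
    "incident_type": "fire",
    "human_text": "Fire detected near the loading dock",
}

message = {
    "camera_id": "cam-7",
    "app_deployment_id": "dep-123",
    "application_id": "app-456",
    "camera_name": "Dock Camera",
    "frame_id": "7b94e2f668fb456f95b73c3084e17f8a",  # new: frame that raised the incident
    "location_name": "Entry Reception",              # new: resolved via _fetch_location_name
    "incidents": [incident],
}
print(json.dumps(message, indent=2))
```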