matrice-analytics 0.1.60__py3-none-any.whl → 0.1.70__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- matrice_analytics/post_processing/face_reg/embedding_manager.py +8 -8
- matrice_analytics/post_processing/face_reg/face_recognition.py +33 -29
- matrice_analytics/post_processing/face_reg/face_recognition_client.py +36 -0
- matrice_analytics/post_processing/usecases/fire_detection.py +2 -2
- matrice_analytics/post_processing/usecases/license_plate_monitoring.py +1 -1
- matrice_analytics/post_processing/usecases/weapon_detection.py +98 -22
- {matrice_analytics-0.1.60.dist-info → matrice_analytics-0.1.70.dist-info}/METADATA +1 -1
- {matrice_analytics-0.1.60.dist-info → matrice_analytics-0.1.70.dist-info}/RECORD +11 -11
- {matrice_analytics-0.1.60.dist-info → matrice_analytics-0.1.70.dist-info}/WHEEL +0 -0
- {matrice_analytics-0.1.60.dist-info → matrice_analytics-0.1.70.dist-info}/licenses/LICENSE.txt +0 -0
- {matrice_analytics-0.1.60.dist-info → matrice_analytics-0.1.70.dist-info}/top_level.txt +0 -0

matrice_analytics/post_processing/face_reg/embedding_manager.py

@@ -234,10 +234,10 @@ class EmbeddingManager:
             return False
 
         try:
-            self.logger.info("Loading staff embeddings from API...")
-            print("=============== LOADING STAFF EMBEDDINGS FROM API ===============")
+            #self.logger.info("Loading staff embeddings from API...")
+            #print("=============== LOADING STAFF EMBEDDINGS FROM API ===============")
             response = await self.face_client.get_all_staff_embeddings()
-            print(f"API RESPONSE TYPE: {type(response)}, IS_LIST: {isinstance(response, list)}, LEN: {len(response) if isinstance(response, list) else 'N/A'}")
+            #print(f"API RESPONSE TYPE: {type(response)}, IS_LIST: {isinstance(response, list)}, LEN: {len(response) if isinstance(response, list) else 'N/A'}")
 
             # Robust response handling: accept dict with data or raw list
             embeddings_data: List[Dict[str, Any]] = []
@@ -330,13 +330,13 @@ class EmbeddingManager:
             self._embeddings_loaded = True  # Mark as successfully loaded
 
             self.logger.info(f"Successfully loaded and cached {len(self.staff_embeddings)} staff embeddings (dim={self.embeddings_matrix.shape[1]})")
-            print(f"=============== SUCCESS: LOADED {len(self.staff_embeddings)} EMBEDDINGS, MATRIX SHAPE: {self.embeddings_matrix.shape} ===============")
+            #print(f"=============== SUCCESS: LOADED {len(self.staff_embeddings)} EMBEDDINGS, MATRIX SHAPE: {self.embeddings_matrix.shape} ===============")
             try:
                 # Quick sanity metrics
                 row0_sum = float(np.sum(self.embeddings_matrix[0])) if self.embeddings_matrix.shape[0] > 0 else 0.0
                 row0_norm = float(np.linalg.norm(self.embeddings_matrix[0])) if self.embeddings_matrix.shape[0] > 0 else 0.0
-                print(f"SANITY CHECK: row0_sum={row0_sum:.4f}, row0_norm={row0_norm:.4f} (should be ~1.0 after normalization)")
-                self.logger.debug(f"Embeddings matrix shape: {self.embeddings_matrix.shape}, dtype={self.embeddings_matrix.dtype}, row0_sum={row0_sum:.4f}")
+                #print(f"SANITY CHECK: row0_sum={row0_sum:.4f}, row0_norm={row0_norm:.4f} (should be ~1.0 after normalization)")
+                #self.logger.debug(f"Embeddings matrix shape: {self.embeddings_matrix.shape}, dtype={self.embeddings_matrix.dtype}, row0_sum={row0_sum:.4f}")
             except Exception as e:
                 print(f"ERROR in sanity check: {e}")
             return True
@@ -395,13 +395,13 @@ class EmbeddingManager:
         """Find best matching staff member using optimized matrix operations (thread-safe)."""
         # Check if embeddings are loaded at all
         if not self._embeddings_loaded:
-            print(f"ERROR: _find_best_local_match called but embeddings not loaded yet (_embeddings_loaded={self._embeddings_loaded})")
+            #print(f"ERROR: _find_best_local_match called but embeddings not loaded yet (_embeddings_loaded={self._embeddings_loaded})")
             self.logger.error("Embeddings not loaded - _find_best_local_match cannot proceed")
             return None
 
         with self._embeddings_lock:
             if self.embeddings_matrix is None or len(self.embedding_metadata) == 0:
-                print(f"ERROR: _find_best_local_match - embeddings_matrix is None={self.embeddings_matrix is None}, metadata_len={len(self.embedding_metadata)}, _embeddings_loaded={self._embeddings_loaded}")
+                #print(f"ERROR: _find_best_local_match - embeddings_matrix is None={self.embeddings_matrix is None}, metadata_len={len(self.embedding_metadata)}, _embeddings_loaded={self._embeddings_loaded}")
                 self.logger.error(f"Embeddings matrix is None despite _embeddings_loaded={self._embeddings_loaded}")
                 return None
 
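
For context, a minimal sketch of the kind of vectorized lookup that `_find_best_local_match` describes ("optimized matrix operations" over L2-normalized embeddings). The function name, threshold value, and metadata layout here are illustrative assumptions, not the package's exact implementation:

    import numpy as np
    from typing import Any, Dict, List, Optional

    def find_best_match(query: np.ndarray,
                        embeddings_matrix: np.ndarray,
                        metadata: List[Dict[str, Any]],
                        similarity_threshold: float = 0.2) -> Optional[Dict[str, Any]]:
        """Return the metadata row with the highest cosine similarity, or None."""
        # Normalize the query; rows of embeddings_matrix are assumed pre-normalized.
        q = query / (np.linalg.norm(query) + 1e-12)
        # One matrix-vector product gives cosine similarity against every staff embedding.
        similarities = embeddings_matrix @ q
        best_idx = int(np.argmax(similarities))
        best_score = float(similarities[best_idx])
        if best_score < similarity_threshold:
            return None
        return {**metadata[best_idx], "similarity_score": best_score}
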

matrice_analytics/post_processing/face_reg/face_recognition.py

@@ -427,9 +427,9 @@ class FaceRecognitionEmbeddingConfig(BaseConfig):
     smoothing_confidence_range_factor: float = 0.5
 
     # Base confidence threshold (separate from embedding similarity threshold)
-    similarity_threshold: float = 0.45
+    similarity_threshold: float = 0.2  # Lowered to match local code - 0.45 was too conservative
     # Base confidence threshold (separate from embedding similarity threshold)
-    confidence_threshold: float = 0.1
+    confidence_threshold: float = 0.1  # Detection confidence threshold
 
     # Face recognition optional features
     enable_face_tracking: bool = True  # Enable BYTE TRACKER advanced face tracking -- KEEP IT TRUE ALWAYS
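
The two defaults above play different roles: `confidence_threshold` gates whether a face detection is considered at all, while `similarity_threshold` gates whether an embedding match counts as a known person. A minimal sketch of that two-stage gating (names and structure are illustrative assumptions, not the package's code):

    from typing import Optional

    SIMILARITY_THRESHOLD = 0.2   # embedding cosine-similarity cutoff (0.1.70 default)
    CONFIDENCE_THRESHOLD = 0.1   # detector confidence cutoff

    def label_detection(det_confidence: float, best_similarity: Optional[float]) -> str:
        """Classify a single face detection under the two thresholds."""
        if det_confidence < CONFIDENCE_THRESHOLD:
            return "discard"        # too weak to treat as a face at all
        if best_similarity is not None and best_similarity >= SIMILARITY_THRESHOLD:
            return "known"          # embedding matched a staff member closely enough
        return "unknown"            # valid face, but no sufficiently close embedding match

    # Example: a confident detection whose best match scores 0.35 is now "known";
    # it would have fallen below the older, more conservative 0.45 cutoff.
    print(label_detection(0.9, 0.35))
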
@@ -528,7 +528,7 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
         # Removed lightweight face tracker fallback; we always use AdvancedTracker
         # Optional gating similar to compare_similarity
         self._track_first_seen: Dict[int, int] = {}
-        self._probation_frames: int = 260
+        self._probation_frames: int = 30  # Reduced from 260 - only for "Unknown" label suppression, not recognition
         self._min_face_w: int = 30
         self._min_face_h: int = 30
 
@@ -584,12 +584,12 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
             raise TypeError(f"Invalid config type for initialization: {type(init_config)}, expected FaceRecognitionEmbeddingConfig")
 
         self.logger.info("Initializing face recognition use case with provided config")
-
+
 
         # Initialize face client (includes deployment update)
         try:
             self.face_client = await self._get_facial_recognition_client(init_config)
-
+
 
             # Initialize People activity logging if enabled
             if init_config.enable_people_activity_logging:
@@ -598,9 +598,9 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
                 self.logger.info("People activity logging enabled and started")
 
             # Initialize EmbeddingManager
-
+
             if not init_config.embedding_config:
-
+
                 init_config.embedding_config = EmbeddingConfig(
                     similarity_threshold=init_config.similarity_threshold,
                     confidence_threshold=init_config.confidence_threshold,
@@ -611,14 +611,11 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
                     staff_embeddings_cache_ttl=43200,
                 )
             self.embedding_manager = EmbeddingManager(init_config.embedding_config, self.face_client)
-
+
             self.logger.info("Embedding manager initialized")
 
-            # Load staff embeddings immediately for fast startup (avoid race conditions)
-            # This MUST succeed before we can proceed - fail fast if it doesn't
-            # print("=============== STEP 3: CALLING _load_staff_embeddings() ===============")
             embeddings_loaded = await self.embedding_manager._load_staff_embeddings()
-
+
 
             if not embeddings_loaded:
                 error_msg = "CRITICAL: Failed to load staff embeddings at initialization - cannot proceed without embeddings"
@@ -659,7 +656,7 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
             self.logger.info("Temporal identity manager initialized with embedding manager for local similarity search")
 
             # Final verification before marking as initialized
-
+
             if not self.embedding_manager.is_ready():
                 status = self.embedding_manager.get_status()
                 error_msg = f"CRITICAL: Final verification failed - embeddings not ready. Status: {status}"
@@ -669,16 +666,10 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
 
             # Log detailed status for debugging
             status = self.embedding_manager.get_status()
-            # print(f"=============== FINAL CHECKS PASSED ===============")
-            # print(f" - Face client: {self.face_client is not None}")
-            # print(f" - Embedding manager: {self.embedding_manager is not None}")
-            # print(f" - Embedding manager status: {status}")
-            # print(f" - Temporal identity manager: {self.temporal_identity_manager is not None}")
 
             self._initialized = True
             self.logger.info("Face recognition use case fully initialized and verified")
-
-
+
         except Exception as e:
             self.logger.error(f"Error during use case initialization: {e}", exc_info=True)
             raise RuntimeError(f"Failed to initialize face recognition use case: {e}") from e
@@ -700,9 +691,16 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
         # Call update_deployment if deployment_id is provided
         if config.deployment_id:
             try:
+                self.logger.info(f"Updating deployment action with ID: {config.deployment_id}")
+                response = await self.face_client.update_deployment_action(config.deployment_id)
+                if response:
+                    self.logger.info(f"Successfully updated deployment action {config.deployment_id}")
+                else:
+                    self.logger.warning(f"Failed to update deployment: {response.get('error', 'Unknown error')}")
+
                 self.logger.info(f"Updating deployment with ID: {config.deployment_id}")
                 response = await self.face_client.update_deployment(config.deployment_id)
-                if response
+                if response:
                     self.logger.info(f"Successfully updated deployment {config.deployment_id}")
                 else:
                     self.logger.warning(f"Failed to update deployment: {response.get('error', 'Unknown error')}")
@@ -835,8 +833,8 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
                 track_low_thresh=0.05,
                 new_track_thresh=0.5,
                 match_thresh=0.8,
-                track_buffer=int(
-                max_time_lost=int(
+                track_buffer=int(600),  # Increased to match local code - allows longer occlusions
+                max_time_lost=int(300),  # Increased to match local code
                 fuse_score=True,
                 enable_gmc=False,
                 frame_rate=int(20)
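
As a rough sense of scale (assuming both buffers are counted in frames, which is how BYTE-style trackers typically interpret them): with the `frame_rate=int(20)` set in the same call, `track_buffer=600` keeps a lost track alive for about 600 / 20 = 30 seconds of occlusion, and `max_time_lost=300` corresponds to roughly 15 seconds.
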
@@ -1197,11 +1195,12 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
                 else:
                     age_frames = 1
 
+                # Eligible for recognition if face is large enough (lowered threshold to match local code behavior)
                 eligible_for_recognition = (w_box >= self._min_face_w and h_box >= self._min_face_h)
 
                 # Primary: API-based identity smoothing via TemporalIdentityManager
                 staff_id = None
-                person_name = "
+                person_name = ""
                 similarity_score = 0.0
                 employee_id = None
                 staff_details: Dict[str, Any] = {}
@@ -1273,16 +1272,21 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
                 # Update detection object directly (avoid relying on SearchResult type)
                 detection = detection.copy()
                 detection["person_id"] = staff_id
-                detection["person_name"] = person_name or "
+                detection["person_name"] = person_name or ""
                 detection["recognition_status"] = "known" if staff_id else "unknown"
                 detection["employee_id"] = employee_id
                 detection["staff_details"] = staff_details if isinstance(staff_details, dict) else {}
                 detection["similarity_score"] = float(similarity_score)
                 detection["enrolled"] = bool(staff_id)
-                # Display label policy: show
+                # Display label policy: ALWAYS show identified faces immediately, only suppress "Unknown" during probation
                 is_identified = (staff_id is not None and detection_type == "known")
-
-
+                if is_identified:
+                    # Identified faces: show name immediately (no probation delay)
+                    detection["display_name"] = person_name
+                else:
+                    # Unknown faces: only show "Unknown" label after probation period to avoid flicker
+                    show_unknown_label = (age_frames >= self._probation_frames)
+                    detection["display_name"] = "" if show_unknown_label else ""  #TODO: Maybe replace this with "Unknown" bec probationif fail we show unknown.
                 # Preserve original category (e.g., 'face') for tracking/counting
 
                 # Update global tracking per unique internal track id to avoid double-counting within a frame
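
The new display policy reads as: a recognized face gets its name immediately, while an unrecognized track only earns an explicit label after it has survived `_probation_frames` frames (30 in 0.1.70). A standalone sketch of the intended policy (note the shipped code currently sets the post-probation label to an empty string as well, with a TODO to switch it to "Unknown"; the function name and demo values here are illustrative):

    def display_name_for(person_name: str, is_identified: bool,
                         age_frames: int, probation_frames: int = 30) -> str:
        """Return the label to draw for one tracked face."""
        if is_identified:
            # Known faces are labeled immediately, with no probation delay.
            return person_name
        # Unknown faces stay unlabeled until they outlast the probation window,
        # which avoids flickering labels on short-lived false tracks.
        return "Unknown" if age_frames >= probation_frames else ""

    print(display_name_for("Alice", True, 1))   # 'Alice' right away
    print(display_name_for("", False, 10))      # '' during probation
    print(display_name_for("", False, 45))      # 'Unknown' after probation
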
@@ -1335,7 +1339,7 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
         except Exception as e:
             self.logger.error(f"Error enqueueing detection for activity logging: {e}")
         # print("------------------PROCESS FACE LATENCY TOTAL----------------------------")
-        print("LATENCY:",(time.time() - st2)*1000,"| Throughput fps:",(1.0 / (time.time() - st2)) if (time.time() - st2) > 0 else None)
+        #print("LATENCY:",(time.time() - st2)*1000,"| Throughput fps:",(1.0 / (time.time() - st2)) if (time.time() - st2) > 0 else None)
         # print("------------------PROCESS FACE LATENCY TOTAL----------------------------")
 
         return detection

matrice_analytics/post_processing/face_reg/face_recognition_client.py

@@ -510,6 +510,42 @@ class FacialRecognitionClient:
         except Exception as e:
             self.logger.error(f"API ERROR: Update deployment request failed - deployment_id={deployment_id} - {e}", exc_info=True)
             return {"success": False, "error": str(e)}
+
+    async def update_deployment_action(self, deployment_id: str) -> Dict[str, Any]:
+        """Update deployment action in backend
+
+        API: PUT /internal/v1/actions/update_facial_recognition_deployment/:server_id?app_deployment_id=:deployment_id
+
+        Args:
+            deployment_id: The deployment ID to update
+
+        Returns:
+            Dict containing response data
+        """
+        if not deployment_id:
+            self.logger.warning("No deployment_id provided for update_deployment_action")
+            return {"success": False, "error": "deployment_id is required"}
+
+        self.logger.info(f"API REQUEST: Updating deployment action - deployment_id={deployment_id}")
+
+        # Use Matrice session for async RPC call to backend (not facial recognition server)
+        try:
+            response = await self.session.rpc.async_send_request(
+                method="PUT",
+                path=f"/internal/v1/actions/update_facial_recognition_deployment/{self.server_id}?app_deployment_id={deployment_id}",
+                payload={},
+                base_url="https://prod.backend.app.matrice.ai"
+            )
+
+            if response.get('success', False):
+                self.logger.info(f"API RESPONSE: Deployment action updated successfully - deployment_id={deployment_id}")
+            else:
+                self.logger.warning(f"Failed to update deployment action for deployment_id={deployment_id}: {response.get('error', 'Unknown error')}")
+
+            return self._handle_response(response)
+        except Exception as e:
+            self.logger.error(f"API ERROR: Update deployment action request failed - deployment_id={deployment_id} - {e}", exc_info=True)
+            return {"success": False, "error": str(e)}
 
     async def enroll_unknown_person(self, embedding: List[float], image_source: str = None, timestamp: str = None, location: str = None, employee_id: str = None) -> Dict[str, Any]:
         """Enroll an unknown person
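
A hedged usage sketch of the new client method: constructing the client (session, server_id, and so on) is outside this diff, so `client` is assumed to be an already-initialized `FacialRecognitionClient` and the deployment id is a placeholder.

    import asyncio

    async def sync_deployment(client, deployment_id: str) -> None:
        # Calls the PUT endpoint shown above and reports the outcome.
        result = await client.update_deployment_action(deployment_id)
        if result.get("success", False):
            print(f"deployment {deployment_id} action updated")
        else:
            print(f"update failed: {result.get('error', 'Unknown error')}")

    # asyncio.run(sync_deployment(client, "some-deployment-id"))
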

matrice_analytics/post_processing/usecases/fire_detection.py

@@ -245,8 +245,8 @@ class FireSmokeUseCase(BaseProcessor):
         business_analytics = business_analytics_list[0] if business_analytics_list else []
         summary = summary_list[0] if summary_list else {}
         agg_summary = {str(frame_number): {
-            "incidents":
-            "tracking_stats":
+            "incidents": incidents,
+            "tracking_stats": tracking_stats,
             "business_analytics": business_analytics,
             "alerts": alerts,
             "human_text": summary}

matrice_analytics/post_processing/usecases/license_plate_monitoring.py

@@ -575,7 +575,7 @@ class LicensePlateMonitorUseCase(BaseProcessor):
         print(f"[LP_LOGGING] Starting plate logging check - detections count: {len(detections)}")
         self.logger.info(f"[LP_LOGGING] Starting plate logging check - detections count: {len(detections)}")
         self.logger.info(f"[LP_LOGGING] Logging enabled: {self._logging_enabled}, Plate logger exists: {self.plate_logger is not None}, Stream info exists: {stream_info is not None}")
-
+        #self._logging_enabled=False
         if not self._logging_enabled:
             print("[LP_LOGGING] Plate logging is DISABLED")
             self.logger.warning("[LP_LOGGING] Plate logging is DISABLED - logging_enabled flag is False")

matrice_analytics/post_processing/usecases/weapon_detection.py

@@ -585,23 +585,6 @@ class WeaponDetectionUseCase(BaseProcessor):
         """Return total unique track_id count for each category."""
         return {cat: len(ids) for cat, ids in getattr(self, '_per_category_total_track_ids', {}).items()}
 
-    def _format_timestamp(self, timestamp: Any) -> str:
-        """Format a timestamp so that exactly two digits follow the decimal point (milliseconds)."""
-        if isinstance(timestamp, (int, float)):
-            timestamp = datetime.fromtimestamp(timestamp, timezone.utc).strftime('%Y-%m-%d-%H:%M:%S.%f UTC')
-        if not isinstance(timestamp, str):
-            return str(timestamp)
-        if '.' not in timestamp:
-            return timestamp
-        main_part, fractional_and_suffix = timestamp.split('.', 1)
-        if ' ' in fractional_and_suffix:
-            fractional_part, suffix = fractional_and_suffix.split(' ', 1)
-            suffix = ' ' + suffix
-        else:
-            fractional_part, suffix = fractional_and_suffix, ''
-        fractional_part = (fractional_part + '00')[:2]
-        return f"{main_part}.{fractional_part}{suffix}"
-
     def _format_timestamp_for_stream(self, timestamp: float) -> str:
         """Format timestamp for streams (YYYY:MM:DD HH:MM:SS format)."""
         dt = datetime.fromtimestamp(timestamp, tz=timezone.utc)
@@ -614,8 +597,56 @@ class WeaponDetectionUseCase(BaseProcessor):
         seconds = round(float(timestamp % 60), 2)
         return f"{hours:02d}:{minutes:02d}:{seconds:.1f}"
 
+    def _format_timestamp(self, timestamp: Any) -> str:
+        """Format a timestamp to match the current timestamp format: YYYY:MM:DD HH:MM:SS.
+
+        The input can be either:
+        1. A numeric Unix timestamp (``float`` / ``int``) – it will be converted to datetime.
+        2. A string in the format ``YYYY-MM-DD-HH:MM:SS.ffffff UTC``.
+
+        The returned value will be in the format: YYYY:MM:DD HH:MM:SS (no milliseconds, no UTC suffix).
+
+        Example
+        -------
+        >>> self._format_timestamp("2025-10-27-19:31:20.187574 UTC")
+        '2025:10:27 19:31:20'
+        """
+
+        # Convert numeric timestamps to datetime first
+        if isinstance(timestamp, (int, float)):
+            dt = datetime.fromtimestamp(timestamp, timezone.utc)
+            return dt.strftime('%Y:%m:%d %H:%M:%S')
+
+        # Ensure we are working with a string from here on
+        if not isinstance(timestamp, str):
+            return str(timestamp)
+
+        # Remove ' UTC' suffix if present
+        timestamp_clean = timestamp.replace(' UTC', '').strip()
+
+        # Remove milliseconds if present (everything after the last dot)
+        if '.' in timestamp_clean:
+            timestamp_clean = timestamp_clean.split('.')[0]
+
+        # Parse the timestamp string and convert to desired format
+        try:
+            # Handle format: YYYY-MM-DD-HH:MM:SS
+            if timestamp_clean.count('-') >= 2:
+                # Replace first two dashes with colons for date part, third with space
+                parts = timestamp_clean.split('-')
+                if len(parts) >= 4:
+                    # parts = ['2025', '10', '27', '19:31:20']
+                    formatted = f"{parts[0]}:{parts[1]}:{parts[2]} {'-'.join(parts[3:])}"
+                    return formatted
+        except Exception:
+            pass
+
+        # If parsing fails, return the cleaned string as-is
+        return timestamp_clean
+
     def _get_current_timestamp_str(self, stream_info: Optional[Dict[str, Any]], precision=False, frame_id: Optional[str]=None) -> str:
         """Get formatted current timestamp based on stream type."""
+
         if not stream_info:
             return "00:00:00.00"
         if precision:
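
A standalone version of the same conversion, runnable outside the class, for quickly checking inputs and outputs (it mirrors the method above; only the function name and demo values are mine):

    from datetime import datetime, timezone
    from typing import Any

    def format_timestamp(timestamp: Any) -> str:
        """Convert a Unix timestamp or 'YYYY-MM-DD-HH:MM:SS.ffffff UTC' string to 'YYYY:MM:DD HH:MM:SS'."""
        if isinstance(timestamp, (int, float)):
            return datetime.fromtimestamp(timestamp, timezone.utc).strftime('%Y:%m:%d %H:%M:%S')
        if not isinstance(timestamp, str):
            return str(timestamp)
        clean = timestamp.replace(' UTC', '').strip()
        if '.' in clean:
            clean = clean.split('.')[0]          # drop microseconds
        parts = clean.split('-')
        if len(parts) >= 4:
            return f"{parts[0]}:{parts[1]}:{parts[2]} {'-'.join(parts[3:])}"
        return clean

    print(format_timestamp("2025-10-27-19:31:20.187574 UTC"))  # 2025:10:27 19:31:20
    print(format_timestamp(0))                                  # 1970:01:01 00:00:00
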
@@ -625,15 +656,20 @@ class WeaponDetectionUseCase(BaseProcessor):
                 else:
                     start_time = stream_info.get("input_settings", {}).get("start_frame", 30)/stream_info.get("input_settings", {}).get("original_fps", 30)
                 stream_time_str = self._format_timestamp_for_video(start_time)
+
                 return self._format_timestamp(stream_info.get("input_settings", {}).get("stream_time", "NA"))
             else:
                 return datetime.now(timezone.utc).strftime("%Y-%m-%d-%H:%M:%S.%f UTC")
+
         if stream_info.get("input_settings", {}).get("start_frame", "na") != "na":
             if frame_id:
                 start_time = int(frame_id)/stream_info.get("input_settings", {}).get("original_fps", 30)
             else:
                 start_time = stream_info.get("input_settings", {}).get("start_frame", 30)/stream_info.get("input_settings", {}).get("original_fps", 30)
+
             stream_time_str = self._format_timestamp_for_video(start_time)
+
+
             return self._format_timestamp(stream_info.get("input_settings", {}).get("stream_time", "NA"))
         else:
             stream_time_str = stream_info.get("input_settings", {}).get("stream_info", {}).get("stream_time", "")
@@ -652,24 +688,62 @@ class WeaponDetectionUseCase(BaseProcessor):
         """Get formatted start timestamp for 'TOTAL SINCE' based on stream type."""
         if not stream_info:
             return "00:00:00"
+
         if precision:
             if self.start_timer is None:
-
+                candidate = stream_info.get("input_settings", {}).get("stream_time")
+                if not candidate or candidate == "NA":
+                    candidate = datetime.now(timezone.utc).strftime("%Y-%m-%d-%H:%M:%S.%f UTC")
+                self.start_timer = candidate
                 return self._format_timestamp(self.start_timer)
             elif stream_info.get("input_settings", {}).get("start_frame", "na") == 1:
-
+                candidate = stream_info.get("input_settings", {}).get("stream_time")
+                if not candidate or candidate == "NA":
+                    candidate = datetime.now(timezone.utc).strftime("%Y-%m-%d-%H:%M:%S.%f UTC")
+                self.start_timer = candidate
                 return self._format_timestamp(self.start_timer)
             else:
                 return self._format_timestamp(self.start_timer)
+
         if self.start_timer is None:
-
+            # Prefer direct input_settings.stream_time if available and not NA
+            candidate = stream_info.get("input_settings", {}).get("stream_time")
+            if not candidate or candidate == "NA":
+                # Fallback to nested stream_info.stream_time used by current timestamp path
+                stream_time_str = stream_info.get("input_settings", {}).get("stream_info", {}).get("stream_time", "")
+                if stream_time_str:
+                    try:
+                        timestamp_str = stream_time_str.replace(" UTC", "")
+                        dt = datetime.strptime(timestamp_str, "%Y-%m-%d-%H:%M:%S.%f")
+                        self._tracking_start_time = dt.replace(tzinfo=timezone.utc).timestamp()
+                        candidate = datetime.fromtimestamp(self._tracking_start_time, timezone.utc).strftime("%Y-%m-%d-%H:%M:%S.%f UTC")
+                    except:
+                        candidate = datetime.now(timezone.utc).strftime("%Y-%m-%d-%H:%M:%S.%f UTC")
+                else:
+                    candidate = datetime.now(timezone.utc).strftime("%Y-%m-%d-%H:%M:%S.%f UTC")
+            self.start_timer = candidate
             return self._format_timestamp(self.start_timer)
         elif stream_info.get("input_settings", {}).get("start_frame", "na") == 1:
-
+            candidate = stream_info.get("input_settings", {}).get("stream_time")
+            if not candidate or candidate == "NA":
+                stream_time_str = stream_info.get("input_settings", {}).get("stream_info", {}).get("stream_time", "")
+                if stream_time_str:
+                    try:
+                        timestamp_str = stream_time_str.replace(" UTC", "")
+                        dt = datetime.strptime(timestamp_str, "%Y-%m-%d-%H:%M:%S.%f")
+                        ts = dt.replace(tzinfo=timezone.utc).timestamp()
+                        candidate = datetime.fromtimestamp(ts, timezone.utc).strftime("%Y-%m-%d-%H:%M:%S.%f UTC")
+                    except:
+                        candidate = datetime.now(timezone.utc).strftime("%Y-%m-%d-%H:%M:%S.%f UTC")
+                else:
+                    candidate = datetime.now(timezone.utc).strftime("%Y-%m-%d-%H:%M:%S.%f UTC")
+            self.start_timer = candidate
             return self._format_timestamp(self.start_timer)
+
         else:
-            if self.start_timer is not None:
+            if self.start_timer is not None and self.start_timer != "NA":
                 return self._format_timestamp(self.start_timer)
+
             if self._tracking_start_time is None:
                 stream_time_str = stream_info.get("input_settings", {}).get("stream_info", {}).get("stream_time", "")
                 if stream_time_str:
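
The fallback paths above all hinge on parsing the nested stream_time string with the pattern "%Y-%m-%d-%H:%M:%S.%f". A quick self-contained check of that round trip (the example value is illustrative):

    from datetime import datetime, timezone

    stream_time_str = "2025-10-27-19:31:20.187574 UTC"   # example value in the format the code expects
    timestamp_str = stream_time_str.replace(" UTC", "")
    dt = datetime.strptime(timestamp_str, "%Y-%m-%d-%H:%M:%S.%f")
    epoch_seconds = dt.replace(tzinfo=timezone.utc).timestamp()
    print(dt.isoformat(), epoch_seconds)  # parsed datetime and its Unix timestamp
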
@@ -681,10 +755,12 @@ class WeaponDetectionUseCase(BaseProcessor):
                     self._tracking_start_time = time.time()
             else:
                 self._tracking_start_time = time.time()
+
             dt = datetime.fromtimestamp(self._tracking_start_time, tz=timezone.utc)
             dt = dt.replace(minute=0, second=0, microsecond=0)
             return dt.strftime('%Y:%m:%d %H:%M:%S')
 
+
     def _compute_iou(self, box1: Any, box2: Any) -> float:
         """Compute IoU between two bounding boxes."""
         def _bbox_to_list(bbox):

{matrice_analytics-0.1.60.dist-info → matrice_analytics-0.1.70.dist-info}/RECORD

@@ -28,9 +28,9 @@ matrice_analytics/post_processing/core/config.py,sha256=uyxWndO-DE9PeGD_h5K3TeB0
 matrice_analytics/post_processing/core/config_utils.py,sha256=QuAS-_JKSoNOtfUWgr7Alf_wsqODzN2rHlQu-cHRK0s,34311
 matrice_analytics/post_processing/face_reg/__init__.py,sha256=yntaiGlW9vdjBpPZQXNuovALihJPzRlFyUE88l3MhBA,1364
 matrice_analytics/post_processing/face_reg/compare_similarity.py,sha256=NlFc8b2a74k0PqSFAbuM_fUbA1BT3pr3VUgvSqRpJzQ,23396
-matrice_analytics/post_processing/face_reg/embedding_manager.py,sha256=
-matrice_analytics/post_processing/face_reg/face_recognition.py,sha256=
-matrice_analytics/post_processing/face_reg/face_recognition_client.py,sha256=
+matrice_analytics/post_processing/face_reg/embedding_manager.py,sha256=3Rba94EcYWFK0D4el9JZ7fwqQ9kOyadrwK30lFmTP-k,44964
+matrice_analytics/post_processing/face_reg/face_recognition.py,sha256=QpiIB0POkVknNr80o5_S3cCHUzGSvbEzO563vVxc0uo,109071
+matrice_analytics/post_processing/face_reg/face_recognition_client.py,sha256=4RhneX-P1IEXsvMllNW3tA8XAAKnvlsZLwNvO8ehdYY,30344
 matrice_analytics/post_processing/face_reg/people_activity_logging.py,sha256=vZbIvkK1h3h58ROeF0_ygF3lqr19O2h5222bN8XyIis,13675
 matrice_analytics/post_processing/ocr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 matrice_analytics/post_processing/ocr/easyocr_extractor.py,sha256=RMrRoGb2gMcJEGouQn8U9cCgCLXPT7qRa8liI4LNxFM,11555
@@ -116,7 +116,7 @@ matrice_analytics/post_processing/usecases/face_emotion.py,sha256=eRfqBdryB0uNoO
 matrice_analytics/post_processing/usecases/face_recognition.py,sha256=T5xAuv6b9OrkmTmoXgZs4LZ5XUsbvp9xCpeLBwdu7eI,40231
 matrice_analytics/post_processing/usecases/fashion_detection.py,sha256=f9gpzMDhIW-gyn46k9jgf8nY7YeoqAnTxGOzksabFbE,40457
 matrice_analytics/post_processing/usecases/field_mapping.py,sha256=JDwYX8pd2W-waDvBh98Y_o_uchJu7wEYbFxOliA4Iq4,39822
-matrice_analytics/post_processing/usecases/fire_detection.py,sha256
+matrice_analytics/post_processing/usecases/fire_detection.py,sha256=-xB9K-2PSqsRWMeZ961aT5dhPzHlw0h8m3wuJotjOG0,54609
 matrice_analytics/post_processing/usecases/flare_analysis.py,sha256=3nf4fUeUwlP_UII0h5fQkUGPXbr32ZnJjaM-dukNSP8,42680
 matrice_analytics/post_processing/usecases/flower_segmentation.py,sha256=4I7qMx9Ztxg_hy9KTVX-3qBhAN-QwDt_Yigf9fFjLus,52017
 matrice_analytics/post_processing/usecases/gas_leak_detection.py,sha256=KL2ft7fXvjTas-65-QgcJm3W8KBsrwF44qibSXjfaLc,40557
@@ -127,7 +127,7 @@ matrice_analytics/post_processing/usecases/leaf.py,sha256=cwgB1ZNxkQFtkk-thSJrkX
 matrice_analytics/post_processing/usecases/leaf_disease.py,sha256=bkiLccTdf4KUq3he4eCpBlKXb5exr-WBhQ_oWQ7os68,36225
 matrice_analytics/post_processing/usecases/leak_detection.py,sha256=oOCLLVMuXVeXPHyN8FUrD3U9JYJJwIz-5fcEMgvLdls,40531
 matrice_analytics/post_processing/usecases/license_plate_detection.py,sha256=dsavd92-wnyXCNrCzaRj24zH7BVvLSa09HkYsrOXYDM,50806
-matrice_analytics/post_processing/usecases/license_plate_monitoring.py,sha256=
+matrice_analytics/post_processing/usecases/license_plate_monitoring.py,sha256=2fOfjqhpjw2Ql9pNaZ8phXXDvLcVjiXDuPo74dAXlI8,91409
 matrice_analytics/post_processing/usecases/litter_monitoring.py,sha256=XaHAUGRBDJg_iVbu8hRMjTR-5TqrLj6ZNCRkInbzZTY,33255
 matrice_analytics/post_processing/usecases/mask_detection.py,sha256=L_s6ZiT5zeXG-BsFcskb3HEG98DhLgqeMSDmCuwOteU,41501
 matrice_analytics/post_processing/usecases/natural_disaster.py,sha256=ehxdPBoYcZWGVDOVn_mHFoz4lIE8LrveAkuXQj0n9XE,44253
@@ -162,7 +162,7 @@ matrice_analytics/post_processing/usecases/underwater_pollution_detection.py,sha
 matrice_analytics/post_processing/usecases/vehicle_monitoring.py,sha256=QsO-coozfy29rY6NszwA6A7nFBOGysfMz5S5VVY7Beg,52849
 matrice_analytics/post_processing/usecases/warehouse_object_segmentation.py,sha256=5uZXTJL_A3tUEN08T-_ZQpUoJ9gqbuuMc4z2mT4sMnQ,43753
 matrice_analytics/post_processing/usecases/waterbody_segmentation.py,sha256=JsCxDEMB8s4WDcezfJDr2zrjM-TCjB9hxOztzSvWmpY,45268
-matrice_analytics/post_processing/usecases/weapon_detection.py,sha256=
+matrice_analytics/post_processing/usecases/weapon_detection.py,sha256=12fwL3IT26fxdq5aZWbZcrei6JGmhMWxal4q8brUv8E,40614
 matrice_analytics/post_processing/usecases/weld_defect_detection.py,sha256=b0dAJGKUofbGrwHDJfIYb4pqmvp4Y23JK09Qb-34mxg,30209
 matrice_analytics/post_processing/usecases/wildlife_monitoring.py,sha256=TMVHJ5GLezmqG7DywmqbLggqNXgpsb63MD7IR6kvDkk,43446
 matrice_analytics/post_processing/usecases/windmill_maintenance.py,sha256=G1eqo3Z-HYmGJ6oeZYrpZwhpvqQ9Lc_T-6S7BLBXHeA,40498
@@ -189,8 +189,8 @@ matrice_analytics/post_processing/utils/format_utils.py,sha256=UTF7A5h9j0_S12xH9
 matrice_analytics/post_processing/utils/geometry_utils.py,sha256=BWfdM6RsdJTTLR1GqkWfdwpjMEjTCJyuBxA4zVGKdfk,9623
 matrice_analytics/post_processing/utils/smoothing_utils.py,sha256=78U-yucAcjUiZ0NIAc9NOUSIT0PWP1cqyIPA_Fdrjp0,14699
 matrice_analytics/post_processing/utils/tracking_utils.py,sha256=rWxuotnJ3VLMHIBOud2KLcu4yZfDp7hVPWUtNAq_2xw,8288
-matrice_analytics-0.1.
-matrice_analytics-0.1.
-matrice_analytics-0.1.
-matrice_analytics-0.1.
-matrice_analytics-0.1.
+matrice_analytics-0.1.70.dist-info/licenses/LICENSE.txt,sha256=_uQUZpgO0mRYL5-fPoEvLSbNnLPv6OmbeEDCHXhK6Qc,1066
+matrice_analytics-0.1.70.dist-info/METADATA,sha256=Kx834enthybusq864ukKPcMmcuCE9b6laG_8N3ESJbA,14378
+matrice_analytics-0.1.70.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+matrice_analytics-0.1.70.dist-info/top_level.txt,sha256=STAPEU-e-rWTerXaspdi76T_eVRSrEfFpURSP7_Dt8E,18
+matrice_analytics-0.1.70.dist-info/RECORD,,

{matrice_analytics-0.1.60.dist-info → matrice_analytics-0.1.70.dist-info}/WHEEL (RENAMED, file without changes)
{matrice_analytics-0.1.60.dist-info → matrice_analytics-0.1.70.dist-info}/licenses/LICENSE.txt (RENAMED, file without changes)
{matrice_analytics-0.1.60.dist-info → matrice_analytics-0.1.70.dist-info}/top_level.txt (RENAMED, file without changes)