matrice-analytics 0.1.70__py3-none-any.whl → 0.1.89__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- matrice_analytics/post_processing/config.py +2 -2
- matrice_analytics/post_processing/core/base.py +1 -1
- matrice_analytics/post_processing/face_reg/face_recognition.py +871 -190
- matrice_analytics/post_processing/face_reg/face_recognition_client.py +55 -25
- matrice_analytics/post_processing/usecases/advanced_customer_service.py +908 -498
- matrice_analytics/post_processing/usecases/color_detection.py +18 -18
- matrice_analytics/post_processing/usecases/customer_service.py +356 -9
- matrice_analytics/post_processing/usecases/fire_detection.py +147 -9
- matrice_analytics/post_processing/usecases/license_plate_monitoring.py +549 -41
- matrice_analytics/post_processing/usecases/people_counting.py +11 -11
- matrice_analytics/post_processing/usecases/vehicle_monitoring.py +34 -34
- matrice_analytics/post_processing/utils/alert_instance_utils.py +950 -0
- matrice_analytics/post_processing/utils/business_metrics_manager_utils.py +1245 -0
- matrice_analytics/post_processing/utils/incident_manager_utils.py +1657 -0
- {matrice_analytics-0.1.70.dist-info → matrice_analytics-0.1.89.dist-info}/METADATA +1 -1
- {matrice_analytics-0.1.70.dist-info → matrice_analytics-0.1.89.dist-info}/RECORD +19 -16
- {matrice_analytics-0.1.70.dist-info → matrice_analytics-0.1.89.dist-info}/WHEEL +0 -0
- {matrice_analytics-0.1.70.dist-info → matrice_analytics-0.1.89.dist-info}/licenses/LICENSE.txt +0 -0
- {matrice_analytics-0.1.70.dist-info → matrice_analytics-0.1.89.dist-info}/top_level.txt +0 -0
@@ -26,7 +26,10 @@ Configuration options:
 import subprocess
 import logging
 import asyncio
+import json
 import os
+import re
+from pathlib import Path
 log_file = open("pip_jetson_btii.log", "w")
 cmd = ["pip", "install", "httpx"]
 subprocess.run(
@@ -37,7 +40,7 @@ subprocess.run(
 )
 log_file.close()
 
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any, Dict, List, Optional, Tuple, NamedTuple
 import time
 import base64
 import cv2
@@ -46,6 +49,20 @@ import threading
 from datetime import datetime, timezone
 from collections import deque
 
+try:
+    from matrice_common.session import Session
+    HAS_MATRICE_SESSION = True
+except ImportError:
+    Session = None
+    HAS_MATRICE_SESSION = False
+
+try:
+    import redis.asyncio as aioredis
+    HAS_AIREDIS = True
+except ImportError:
+    aioredis = None
+    HAS_AIREDIS = False
+
 from ..core.base import (
     BaseProcessor,
     ProcessingContext,
@@ -68,6 +85,7 @@ from .embedding_manager import EmbeddingManager, EmbeddingConfig
 
 # ---- Lightweight identity tracking and temporal smoothing (adapted from compare_similarity.py) ---- #
 from collections import deque, defaultdict
+from matrice_common.session import Session
 
 
 
@@ -83,6 +101,543 @@ def _normalize_embedding(vec: List[float]) -> List[float]:
     return arr.tolist()
 
 
+class RedisFaceMatchResult(NamedTuple):
+    staff_id: Optional[str]
+    person_name: str
+    confidence: float
+    employee_id: Optional[str]
+    raw: Dict[str, Any]
+
+
+class RedisFaceMatcher:
+    """Handles Redis-based face similarity search."""
+
+    ACTION_ID_PATTERN = re.compile(r"^[0-9a-f]{8,}$", re.IGNORECASE)
+
+    def __init__(
+        self,
+        session=None,
+        logger: Optional[logging.Logger] = None,
+        redis_url: Optional[str] = None,
+        face_client=None,
+    ) -> None:
+        self.logger = logger or logging.getLogger(__name__)
+        self._session = session
+        self.face_client = face_client
+        self.redis_url = (
+            redis_url
+            or os.getenv("FACE_RECOG_REDIS_URL")
+            or os.getenv("REDIS_URL")
+        )
+        self.stream_name = os.getenv(
+            "FACE_RECOG_REDIS_STREAM", "facial_detection_stream"
+        )
+        self.default_min_confidence = float(
+            os.getenv("FACE_RECOG_REDIS_MIN_CONFIDENCE", "0.01")
+        )
+        self.response_timeout = (
+            float(os.getenv("FACE_RECOG_REDIS_RESPONSE_TIMEOUT_MS", "200")) / 1000.0  # Reduced from 600ms to 200ms for faster failure
+        )
+        self.poll_interval = (
+            float(os.getenv("FACE_RECOG_REDIS_POLL_INTERVAL_MS", "5")) / 1000.0  # Reduced from 20ms to 5ms for faster polling
+        )
+        self.stream_maxlen = int(
+            os.getenv("FACE_RECOG_REDIS_STREAM_MAXLEN", "5000")
+        )
+        self._redis_client = None  # type: ignore[assignment]
+        self._redis_connection_params: Optional[Dict[str, Any]] = None
+        self._app_deployment_id = os.getenv("APP_DEPLOYMENT_ID")
+        self._action_id = (
+            os.getenv("ACTION_ID")
+            or os.getenv("MATRISE_ACTION_ID")
+            or self._discover_action_id()
+        )
+        self._redis_server_id = os.getenv("REDIS_SERVER_ID")
+        self._app_dep_lock = asyncio.Lock()
+        self._session_lock = asyncio.Lock()
+        self._redis_lock = asyncio.Lock()
+        self._redis_warning_logged = False
+
+    def is_available(self) -> bool:
+        return HAS_AIREDIS
+
+    async def match_embedding(
+        self,
+        embedding: List[float],
+        search_id: Optional[str],
+        location: str = "",
+        min_confidence: Optional[float] = None,
+    ) -> Optional[RedisFaceMatchResult]:
+        """Send embedding to Redis stream and wait for match result."""
+        if not HAS_AIREDIS:
+            if not self._redis_warning_logged:
+                self.logger.warning(
+                    "redis.asyncio not available; skipping Redis face matcher flow"
+                )
+                self._redis_warning_logged = True
+            return None
+
+        embedding_list = self._prepare_embedding_list(embedding)
+        if not embedding_list:
+            self.logger.warning(f"Empty embedding list for search_id={search_id}, cannot send to Redis")
+            print(f"WARNING: Empty embedding list for search_id={search_id}, cannot send to Redis")
+            return None
+
+        if len(embedding_list) == 0:
+            self.logger.warning(f"Embedding list has zero length for search_id={search_id}")
+            print(f"WARNING: Embedding list has zero length for search_id={search_id}")
+            return None
+
+        app_dep_id = await self._ensure_app_deployment_id()
+        if not app_dep_id:
+            return None
+
+        redis_client = await self._ensure_redis_client()
+        if redis_client is None:
+            return None
+
+        resolved_search_id = str(search_id or self._generate_search_id())
+        payload = {
+            "appDepId": app_dep_id,
+            "searchId": resolved_search_id,
+            "embedding": embedding_list,
+            "location": location or "",
+            "minConfidence": float(
+                min_confidence if min_confidence is not None else self.default_min_confidence
+            ),
+        }
+
+        try:
+            self.logger.debug(
+                f"Sending embedding to Redis stream {self.stream_name} with search_id={resolved_search_id}, "
+                f"embedding_len={len(embedding_list)}, minConfidence={payload.get('minConfidence')}"
+            )
+            await redis_client.xadd(
+                self.stream_name,
+                {"data": json.dumps(payload, separators=(",", ":"))},
+                maxlen=self.stream_maxlen,
+                approximate=True,
+            )
+            self.logger.debug(f"Successfully sent embedding to Redis stream for search_id={resolved_search_id}")
+        except Exception as exc:
+            self.logger.error(
+                "Failed to enqueue face embedding to Redis stream %s: %s",
+                self.stream_name,
+                exc,
+                exc_info=True,
+            )
+            print(f"ERROR: Failed to send to Redis stream {self.stream_name}: {exc}")
+            return None
+
+        result_key = f"{resolved_search_id}_{app_dep_id}"
+        deadline = time.monotonic() + self.response_timeout
+        poll_count = 0
+        start_poll_time = time.monotonic()
+
+        self.logger.debug(f"Waiting for Redis response with key={result_key}, timeout={self.response_timeout:.3f}s")
+
+        # Poll loop - check immediately first, then with intervals
+        while time.monotonic() < deadline:
+            try:
+                raw_value = await redis_client.get(result_key)
+                poll_count += 1
+            except Exception as exc:
+                self.logger.error(
+                    "Failed to read Redis result for key %s: %s",
+                    result_key,
+                    exc,
+                    exc_info=True,
+                )
+                print(f"ERROR: Failed to read Redis result for key {result_key}: {exc}")
+                return None
+
+            if raw_value:
+                await redis_client.delete(result_key)
+                try:
+                    parsed = json.loads(raw_value)
+                except Exception as exc:
+                    parsed = json.loads(raw_value)
+                    self.logger.error(
+                        "Unable to parse Redis face match response: %s",
+                        exc,
+                        exc_info=True,
+                    )
+                    print(f"ERROR: Unable to parse Redis face match response: {exc}")
+                    #return None
+
+                # Log and print the raw Redis response for debugging
+                self.logger.info(f"Redis raw response for search_id={resolved_search_id}: {parsed}")
+                print(f"Redis raw response for search_id={resolved_search_id}: {parsed}")
+
+                match_data = None
+                if isinstance(parsed, list) and parsed:
+                    match_data = parsed[0]
+                    self.logger.info(f"Redis response is array, extracted first element: {match_data}")
+                    print(f"Redis response is array, extracted first element: {match_data}")
+                elif isinstance(parsed, dict):
+                    match_data = parsed
+                    self.logger.info(f"Redis response is dict: {match_data}")
+                    print(f"Redis response is dict: {match_data}")
+                else:
+                    self.logger.warning(f"Redis response is neither list nor dict: {type(parsed)}, value: {parsed}")
+                    print(f"WARNING: Redis response is neither list nor dict: {type(parsed)}, value: {parsed}")
+
+                if not isinstance(match_data, dict):
+                    self.logger.warning(f"match_data is not a dict after extraction: {type(match_data)}, value: {match_data}")
+                    print(f"WARNING: match_data is not a dict after extraction: {type(match_data)}, value: {match_data}")
+                    return None
+
+                staff_id = match_data.get("staffId") or match_data.get("staff_id")
+                if not staff_id:
+                    self.logger.warning(f"No staffId found in match_data: {match_data}")
+                    print(f"WARNING: No staffId found in match_data: {match_data}")
+                    return None
+                person_name = str(match_data.get("name") or "Unknown")
+                confidence = float(match_data.get("conf") or match_data.get("confidence") or 0.0)
+                employee_id = match_data.get("employeeId") or match_data.get("embeddingId")
+
+                # Log the extracted values
+                self.logger.info(
+                    f"Redis match extracted - staff_id={staff_id}, person_name={person_name}, "
+                    f"confidence={confidence}, employee_id={employee_id}"
+                )
+                print(
+                    f"Redis match extracted - staff_id={staff_id}, person_name={person_name}, "
+                    f"confidence={confidence}, employee_id={employee_id}"
+                )
+
+                # Check confidence threshold before returning
+                min_conf = float(min_confidence if min_confidence is not None else self.default_min_confidence)
+                if confidence < min_conf:
+                    self.logger.debug(
+                        f"Redis match confidence {confidence:.3f} below threshold {min_conf:.3f}, rejecting"
+                    )
+                    print(f"Redis match confidence {confidence:.3f} below threshold {min_conf:.3f}, rejecting")
+                    return None
+
+                result = RedisFaceMatchResult(
+                    staff_id=str(staff_id),
+                    person_name=person_name,
+                    confidence=round(confidence, 3),
+                    employee_id=str(employee_id) if employee_id else None,
+                    raw=match_data,
+                )
+
+                poll_time = (time.monotonic() - start_poll_time) * 1000.0
+                self.logger.info(
+                    f"Redis match result created (polls={poll_count}, poll_time={poll_time:.2f}ms): "
+                    f"staff_id={result.staff_id}, name={result.person_name}, conf={result.confidence}"
+                )
+                print(
+                    f"Redis match result created (polls={poll_count}, poll_time={poll_time:.2f}ms): "
+                    f"staff_id={result.staff_id}, name={result.person_name}, conf={result.confidence}"
+                )
+
+                return result
+
+            # Use shorter sleep for faster response (already reduced poll_interval to 5ms)
+            await asyncio.sleep(self.poll_interval)
+
+        poll_time = (time.monotonic() - start_poll_time) * 1000.0
+        self.logger.warning(
+            "Timed out waiting for Redis face match result for key %s (timeout=%.3fs, polls=%d, poll_time=%.2fms)",
+            result_key,
+            self.response_timeout,
+            poll_count,
+            poll_time,
+        )
+        print(
+            f"WARNING: Redis timeout for search_id={resolved_search_id} "
+            f"(timeout={self.response_timeout:.3f}s, polls={poll_count}, poll_time={poll_time:.2f}ms)"
+        )
+        return None
+
+    def _prepare_embedding_list(self, embedding: List[float]) -> List[float]:
+        if isinstance(embedding, np.ndarray):
+            return embedding.astype(np.float32).tolist()
+        prepared = []
+        try:
+            for value in embedding:
+                prepared.append(float(value))
+        except Exception:
+            self.logger.debug("Failed to convert embedding to float list", exc_info=True)
+            return []
+        return prepared
+
+    def _generate_search_id(self) -> str:
+        return f"face_{int(time.time() * 1000)}"
+
+    async def _ensure_app_deployment_id(self) -> Optional[str]:
+        if self._app_deployment_id:
+            return self._app_deployment_id
+
+        async with self._app_dep_lock:
+            if self._app_deployment_id:
+                return self._app_deployment_id
+
+            action_id = self._action_id or self._discover_action_id()
+            if not action_id:
+                self.logger.warning(
+                    "Unable to determine action_id for Redis face matcher"
+                )
+                return None
+
+            session = await self._ensure_session()
+            if session is None:
+                return None
+
+            response = await asyncio.to_thread(
+                self._fetch_action_details_sync, session, action_id
+            )
+            if not response or not response.get("success", False):
+                self.logger.warning(
+                    "Failed to fetch action details for action_id=%s", action_id
+                )
+                return None
+
+            action_doc = response.get("data", {})
+            action_details = action_doc.get("actionDetails", {})
+            app_dep_id = (
+                action_details.get("app_deployment_id")
+                or action_details.get("appDepId")
+            )
+            redis_server_id = (
+                action_details.get("redis_server_id")
+                or action_details.get("redisServerId")
+                or action_details.get("redis_serverid")
+                or action_details.get("redisServerID")
+            )
+            if not app_dep_id:
+                self.logger.warning(
+                    "app_deployment_id missing in action details for action_id=%s",
+                    action_id,
+                )
+                return None
+
+            self._app_deployment_id = str(app_dep_id)
+            if redis_server_id:
+                self._redis_server_id = str(redis_server_id)
+            self.logger.info(
+                "Resolved app deployment id %s for action_id=%s",
+                self._app_deployment_id,
+                action_id,
+            )
+            return self._app_deployment_id
+
+    async def _ensure_session(self):
+        if self._session or not HAS_MATRICE_SESSION:
+            if not self._session and not HAS_MATRICE_SESSION:
+                self.logger.warning(
+                    "matrice_common.session unavailable; cannot create RPC session for Redis matcher"
+                )
+            return self._session
+
+        async with self._session_lock:
+            if self._session:
+                return self._session
+
+            access_key = os.getenv("MATRICE_ACCESS_KEY_ID")
+            secret_key = os.getenv("MATRICE_SECRET_ACCESS_KEY")
+            account_number = os.getenv("MATRICE_ACCOUNT_NUMBER", "")
+
+            if not access_key or not secret_key:
+                self.logger.warning(
+                    "Missing Matrice credentials; cannot initialize session for Redis matcher"
+                )
+                return None
+
+            try:
+                self._session = Session(
+                    account_number=account_number,
+                    access_key=access_key,
+                    secret_key=secret_key,
+                )
+                self.logger.info("Initialized Matrice session for Redis face matcher")
+            except Exception as exc:
+                self.logger.error(
+                    "Failed to initialize Matrice session for Redis matcher: %s",
+                    exc,
+                    exc_info=True,
+                )
+                self._session = None
+
+        return self._session
+
+    async def _ensure_redis_client(self):
+        if self._redis_client:
+            return self._redis_client
+
+        async with self._redis_lock:
+            if self._redis_client:
+                return self._redis_client
+
+            if not self.redis_url:
+                host = os.getenv("FACE_RECOG_REDIS_HOST")
+                port = os.getenv("FACE_RECOG_REDIS_PORT")
+                if host and port:
+                    self.redis_url = f"redis://{host}:{port}/0"
+
+            if self.redis_url:
+                try:
+                    self._redis_client = aioredis.from_url(
+                        self.redis_url,
+                        decode_responses=True,
+                        health_check_interval=30,
+                    )
+                    self.logger.info(
+                        "Connected Redis face matcher client to %s (stream=%s)",
+                        self.redis_url,
+                        self.stream_name,
+                    )
+                    return self._redis_client
+                except Exception as exc:
+                    self.logger.error(
+                        "Failed to connect to Redis at %s: %s",
+                        self.redis_url,
+                        exc,
+                        exc_info=True,
+                    )
+                    self._redis_client = None
+
+            conn_params = await self._ensure_redis_connection_params()
+            if not conn_params:
+                self.logger.error(
+                    "Redis connection parameters unavailable. Configure FACE_RECOG_REDIS_URL or ensure redis_server_id is set."
+                )
+                return None
+
+            try:
+                self._redis_client = aioredis.Redis(
+                    host=conn_params.get("host"),
+                    port=conn_params.get("port", 6379),
+                    username=conn_params.get("username"),
+                    password=conn_params.get("password") or None,
+                    db=conn_params.get("db", 0),
+                    ssl=conn_params.get("ssl", False),
+                    decode_responses=True,
+                    socket_connect_timeout=conn_params.get("connection_timeout", 120),
+                    socket_timeout=conn_params.get("socket_timeout", 120),
+                    retry_on_timeout=True,
+                    health_check_interval=30,
+                )
+                self.logger.info(
+                    "Connected Redis face matcher client to %s:%s (db=%s, stream=%s)",
+                    conn_params.get("host"),
+                    conn_params.get("port"),
+                    conn_params.get("db"),
+                    self.stream_name,
+                )
+            except Exception as exc:
+                self.logger.error(
+                    "Failed to create Redis client with fetched parameters: %s",
+                    exc,
+                    exc_info=True,
+                )
+                self._redis_client = None
+
+        return self._redis_client
+
+    async def _ensure_redis_connection_params(self) -> Optional[Dict[str, Any]]:
+        if self._redis_connection_params:
+            return self._redis_connection_params
+
+        if not self.face_client:
+            self.logger.warning(
+                "Cannot fetch Redis connection parameters without face_client"
+            )
+            return None
+
+        await self._ensure_app_deployment_id()
+
+        try:
+            response = await self.face_client.get_redis_details()
+        except Exception as exc:
+            self.logger.error(
+                "Failed to fetch Redis details from facial recognition server: %s",
+                exc,
+                exc_info=True,
+            )
+            return None
+
+        if not response or not response.get("success", False):
+            self.logger.warning(
+                "Redis details API returned failure: %s",
+                response,
+            )
+            return None
+
+        data = response.get("data", {})
+        host = data.get("REDIS_IP")
+        port = data.get("REDIS_PORT")
+        password = data.get("REDIS_PASSWORD")
+
+        if not host or not port:
+            self.logger.warning(
+                "Redis details missing REDIS_IP or REDIS_PORT"
+            )
+            return None
+
+        try:
+            params = {
+                "host": host,
+                "port": int(port),
+                "password": password or None,
+                "username": None,
+                "db": 0,
+                "connection_timeout": 120,
+                "socket_timeout": 120,
+                "ssl": False,
+            }
+        except Exception as exc:
+            self.logger.error(
+                "Invalid Redis connection config: %s",
+                exc,
+                exc_info=True,
+            )
+            return None
+
+        self._redis_connection_params = params
+        return self._redis_connection_params
+
+    @classmethod
+    def _discover_action_id(cls) -> Optional[str]:
+        candidates: List[str] = []
+        try:
+            cwd = Path.cwd()
+            candidates.append(cwd.name)
+            for parent in cwd.parents:
+                candidates.append(parent.name)
+        except Exception:
+            pass
+
+        try:
+            usr_src = Path("/usr/src")
+            if usr_src.exists():
+                for child in usr_src.iterdir():
+                    if child.is_dir():
+                        candidates.append(child.name)
+        except Exception:
+            pass
+
+        for candidate in candidates:
+            if candidate and len(candidate) >= 8 and cls.ACTION_ID_PATTERN.match(candidate):
+                return candidate
+        return None
+
+    def _fetch_action_details_sync(self, session, action_id: str) -> Optional[Dict[str, Any]]:
+        url = f"/v1/actions/action/{action_id}/details"
+        try:
+            return session.rpc.get(url)
+        except Exception as exc:
+            self.logger.error(
+                "Failed to fetch action details for action_id=%s: %s",
+                action_id,
+                exc,
+                exc_info=True,
+            )
+            return None
+
+
 ## Removed FaceTracker fallback (using AdvancedTracker only)
 
 
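
The new RedisFaceMatcher above is configured entirely from environment variables and resolves its deployment context lazily. The sketch below shows how it might be exercised on its own; it is a minimal illustration assuming the FACE_RECOG_REDIS_* variables point at a reachable Redis instance and that APP_DEPLOYMENT_ID is already set (which skips the action-id lookup). The embedding, identifiers, and location are placeholders, not values from the package.

    import asyncio

    async def demo_redis_match(embedding):
        # RedisFaceMatcher.__init__ reads FACE_RECOG_REDIS_URL / REDIS_URL, the stream name,
        # the timeouts, and APP_DEPLOYMENT_ID from the environment.
        matcher = RedisFaceMatcher()
        if not matcher.is_available():
            print("redis.asyncio is not installed; matcher will no-op")
            return None
        return await matcher.match_embedding(
            embedding=embedding,
            search_id="frame42_track7",   # placeholder identifier
            location="lobby-cam-1",       # placeholder location
            min_confidence=0.3,
        )

    # asyncio.run(demo_redis_match([0.0] * 512))  # embedding length depends on the face model used
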
@@ -97,8 +652,9 @@ class TemporalIdentityManager:
     def __init__(
         self,
         face_client: FacialRecognitionClient,
-        embedding_manager
-
+        embedding_manager=None,
+        redis_matcher: Optional[RedisFaceMatcher] = None,
+        recognition_threshold: float = 0.3,
         history_size: int = 20,
         unknown_patience: int = 7,
         switch_patience: int = 5,
@@ -107,12 +663,14 @@ class TemporalIdentityManager:
         self.logger = logging.getLogger(__name__)
         self.face_client = face_client
         self.embedding_manager = embedding_manager
+        self.redis_matcher = redis_matcher
         self.threshold = float(recognition_threshold)
         self.history_size = int(history_size)
         self.unknown_patience = int(unknown_patience)
         self.switch_patience = int(switch_patience)
         self.fallback_margin = float(fallback_margin)
         self.tracks: Dict[Any, Dict[str, object]] = {}
+        self.emb_run=False
 
     def _ensure_track(self, track_id: Any) -> None:
         if track_id not in self.tracks:
@@ -128,7 +686,13 @@ class TemporalIdentityManager:
             "streaks": defaultdict(int),  # staff_id -> consecutive frames
         }
 
-    async def _compute_best_identity(
+    async def _compute_best_identity(
+        self,
+        emb: List[float],
+        location: str = "",
+        timestamp: str = "",
+        search_id: Optional[str] = None,
+    ) -> Tuple[Optional[str], str, float, Optional[str], Dict[str, Any], str]:
         """
         Find best identity match using local similarity search (fast) with optional API fallback.
         Returns (staff_id, person_name, score, employee_id, staff_details, detection_type).
@@ -138,11 +702,86 @@ class TemporalIdentityManager:
         """
         if not emb or not isinstance(emb, list):
            return None, "Unknown", 0.0, None, {}, "unknown"
-
-
-
+
+        #-------------- New Redis API Fast Call Start--------------------------------------------------------------------------------------
+        # ALWAYS attempt Redis match for every detection (required for every frame)
+        if self.redis_matcher:
+            try:
+                self.logger.debug(f"Attempting Redis match for search_id={search_id}, embedding_len={len(emb) if emb else 0}")
+                redis_start_time = time.time()
+                redis_match = await self.redis_matcher.match_embedding(
+                    embedding=emb,
+                    search_id=search_id,
+                    location=location or "",
+                    min_confidence=self.threshold,  # Use recognition threshold instead of default_min_confidence
+                )
+                redis_latency_ms = (time.time() - redis_start_time) * 1000.0
+
+                if redis_match:
+                    self.logger.info(
+                        f"Redis match found in {redis_latency_ms:.2f}ms - staff_id={redis_match.staff_id}, "
+                        f"person_name={redis_match.person_name}, confidence={redis_match.confidence:.3f}"
+                    )
+                    print(
+                        f"Redis match found in {redis_latency_ms:.2f}ms - staff_id={redis_match.staff_id}, "
+                        f"person_name={redis_match.person_name}, confidence={redis_match.confidence:.3f}"
+                    )
+
+                    if redis_match.staff_id:
+                        staff_details = (
+                            dict(redis_match.raw) if isinstance(redis_match.raw, dict) else {}
+                        )
+                        if redis_match.person_name and not staff_details.get("name"):
+                            staff_details["name"] = redis_match.person_name
+
+                        # Check if confidence meets threshold
+                        if float(redis_match.confidence) >= self.threshold:
+                            self.logger.info(
+                                "Redis embedding match ACCEPTED - staff_id=%s, person_name=%s, score=%.3f (threshold=%.3f)",
+                                redis_match.staff_id,
+                                redis_match.person_name,
+                                float(redis_match.confidence),
+                                self.threshold,
+                            )
+                            print(
+                                f"Redis embedding match ACCEPTED - staff_id={redis_match.staff_id}, "
+                                f"person_name={redis_match.person_name}, score={redis_match.confidence:.3f} "
+                                f"(threshold={self.threshold:.3f})"
+                            )
+                            return (
+                                str(redis_match.staff_id),
+                                redis_match.person_name or "Unknown",
+                                float(redis_match.confidence),
+                                redis_match.employee_id,
+                                staff_details,
+                                "known",
+                            )
+                        else:
+                            self.logger.debug(
+                                "Redis embedding match REJECTED - confidence %.3f below threshold %.3f",
+                                float(redis_match.confidence),
+                                self.threshold,
+                            )
+                            print(
+                                f"Redis embedding match REJECTED - confidence {redis_match.confidence:.3f} "
+                                f"below threshold {self.threshold:.3f}"
+                            )
+                    else:
+                        self.logger.warning("Redis match returned but staff_id is None/empty")
+                        print("WARNING: Redis match returned but staff_id is None/empty")
+                else:
+                    self.logger.debug(f"No Redis match found for search_id={search_id} (took {redis_latency_ms:.2f}ms)")
+                    print(f"No Redis match found for search_id={search_id} (took {redis_latency_ms:.2f}ms)")
+            except Exception as exc:
+                self.logger.warning(
+                    "Redis face match flow failed; falling back to local search: %s",
+                    exc,
+                    exc_info=True,
+                )
+                print(f"Redis face match flow failed: {exc}")
+        #-------------- New Redis API Fast Call END----------------------------------------------------------------------------------------
         # PRIMARY PATH: Local similarity search using EmbeddingManager (FAST - ~1-5ms)
-        if self.embedding_manager:
+        if self.embedding_manager and self.emb_run:
             # Defensive check: ensure embeddings are loaded before attempting search
             if not self.embedding_manager.is_ready():
                 status = self.embedding_manager.get_status()
@@ -169,10 +808,6 @@ class TemporalIdentityManager:
                 elif first_name or last_name:
                     person_name = f"{first_name or ''} {last_name or ''}".strip() or "Unknown"
 
-                # print("------------------FACE RECOG TEMPORAL IDENTITY MANAGER UPDATE - COMPUTE BEST IDENTITY (LOCAL)----------------------------")
-                # print("LATENCY:",(time.time() - st10)*1000,"| Throughput fps:",(1.0 / (time.time() - st10)) if (time.time() - st10) > 0 else None)
-                # print(f"LOCAL MATCH: staff_id={staff_embedding.staff_id}, similarity={similarity_score:.3f}")
-                # print("------------------FACE RECOG TEMPORAL IDENTITY MANAGER UPDATE - COMPUTE BEST IDENTITY (LOCAL)----------------------------")
 
                 self.logger.info(f"Local embedding match - staff_id={staff_embedding.staff_id}, person_name={person_name}, score={similarity_score:.3f}")
 
@@ -192,10 +827,6 @@ class TemporalIdentityManager:
                 except Exception:
                     pass
                 self.logger.debug(f"No local match found - best_similarity={best_sim:.3f}, threshold={self.threshold:.3f}")
-                # print("------------------FACE RECOG TEMPORAL IDENTITY MANAGER UPDATE - COMPUTE BEST IDENTITY (LOCAL - NO MATCH)----------------------------")
-                # print("LATENCY:",(time.time() - st10)*1000,"| Throughput fps:",(1.0 / (time.time() - st10)) if (time.time() - st10) > 0 else None)
-                # print(f"BEST_SIM={best_sim:.3f} THRESH={self.threshold:.3f}")
-                # print("------------------FACE RECOG TEMPORAL IDENTITY MANAGER UPDATE - COMPUTE BEST IDENTITY (LOCAL - NO MATCH)----------------------------")
 
                 return None, "Unknown", 0.0, None, {}, "unknown"
 
@@ -203,74 +834,83 @@ class TemporalIdentityManager:
             self.logger.warning(f"Local similarity search failed, falling back to API: {e}")
             # Fall through to API call below
 
+        #---------------------------------BACKUP MONGODB API SLOW CALL--------------------------------------------------------------------
         # FALLBACK PATH: API call (SLOW - ~2000ms) - only if embedding manager not available
         # This path should rarely be used in production
-        try:
-
-
-
-
-
-
-
-
-
-
-            # print("LATENCY:",(time.time() - st10)*1000,"| Throughput fps:",(1.0 / (time.time() - st10)) if (time.time() - st10) > 0 else None)
-            # print("WARNING: Using slow API fallback!")
-            # print("------------------FACE RECOG TEMPORAL IDENTITY MANAGER UPDATE - COMPUTE BEST IDENTITY (API FALLBACK)----------------------------")
-
-        except Exception as e:
-            self.logger.error(f"API ERROR: Failed to search similar faces in _compute_best_identity: {e}", exc_info=True)
-            return None, "Unknown", 0.0, None, {}, "unknown"
+        # try:
+        #     self.logger.warning("Using slow API fallback for identity search - consider checking embedding manager initialization")
+        #     resp = await self.face_client.search_similar_faces(
+        #         face_embedding=emb,
+        #         threshold=0.01,  # low threshold to always get top-1
+        #         limit=1,
+        #         collection="staff_enrollment",
+        #         location=location,
+        #         timestamp=timestamp,
+        #     )
+
 
-
-
-
-            if isinstance(resp, dict):
-                if isinstance(resp.get("data"), list):
-                    results = resp.get("data", [])
-                elif isinstance(resp.get("results"), list):
-                    results = resp.get("results", [])
-                elif isinstance(resp.get("items"), list):
-                    results = resp.get("items", [])
-            elif isinstance(resp, list):
-                results = resp
-
-            if not results:
-                self.logger.debug("No identity match found from API")
-                return None, "Unknown", 0.0, None, {}, "unknown"
+        # except Exception as e:
+        #     self.logger.error(f"API ERROR: Failed to search similar faces in _compute_best_identity: {e}", exc_info=True)
+        #     return None, "Unknown", 0.0, None, {}, "unknown"
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        # try:
+        #     results: List[Any] = []
+        #     self.logger.debug('API Response received for identity search')
+        #     if isinstance(resp, dict):
+        #         if isinstance(resp.get("data"), list):
+        #             results = resp.get("data", [])
+        #         elif isinstance(resp.get("results"), list):
+        #             results = resp.get("results", [])
+        #         elif isinstance(resp.get("items"), list):
+        #             results = resp.get("items", [])
+        #     elif isinstance(resp, list):
+        #         results = resp
+
+        #     if not results:
+        #         self.logger.debug("No identity match found from API")
+        #         return None, "Unknown", 0.0, None, {}, "unknown"
+
+        #     item = results[0] if isinstance(results, list) else results
+        #     self.logger.debug(f'Top-1 match from API: {item}')
+        #     # Be defensive with keys and types
+        #     staff_id = item.get("staffId") if isinstance(item, dict) else None
+        #     employee_id = str(item.get("_id")) if isinstance(item, dict) and item.get("_id") is not None else None
+        #     score = float(item.get("score", 0.0)) if isinstance(item, dict) else 0.0
+        #     detection_type = str(item.get("detectionType", "unknown")) if isinstance(item, dict) else "unknown"
+        #     staff_details = item.get("staffDetails", {}) if isinstance(item, dict) else {}
+        #     # Extract a person name from staff_details
+        #     person_name = "Unknown"
+        #     if isinstance(staff_details, dict) and staff_details:
+        #         first_name = staff_details.get("firstName")
+        #         last_name = staff_details.get("lastName")
+        #         name = staff_details.get("name")
+        #         if name:
+        #             person_name = str(name)
+        #         else:
+        #             if first_name or last_name:
+        #                 person_name = f"{first_name or ''} {last_name or ''}".strip() or "UnknowNN" #TODO:ebugging change to normal once done
+        #     # If API says unknown or missing staff_id, treat as unknown
+        #     if not staff_id: #or detection_type == "unknown"
+        #         self.logger.debug(f"API returned unknown or missing staff_id - score={score}, employee_id={employee_id}")
+        #         return None, "Unknown", float(score), employee_id, staff_details if isinstance(staff_details, dict) else {}, "unknown"
+        #     self.logger.info(f"API identified face - staff_id={staff_id}, person_name={person_name}, score={score:.3f}")
+        #     return str(staff_id), person_name, float(score), employee_id, staff_details if isinstance(staff_details, dict) else {}, "known"
+        # except Exception as e:
+        #     self.logger.error(f"Error parsing API response in _compute_best_identity: {e}", exc_info=True)
+        #     return None, "Unknown", 0.0, None, {}, "unknown"
+        #---------------------------------BACKUP MONGODB API SLOW CALL--------------------------------------------------------------------
+
+        # If we reach here, no match was found through any method
+        self.logger.debug("No identity match found - returning unknown")
+        return None, "Unknown", 0.0, None, {}, "unknown"
 
-    async def _compute_best_identity_from_history(
+    async def _compute_best_identity_from_history(
+        self,
+        track_state: Dict[str, object],
+        location: str = "",
+        timestamp: str = "",
+        search_id: Optional[str] = None,
+    ) -> Tuple[Optional[str], str, float, Optional[str], Dict[str, Any], str]:
         hist: deque = track_state.get("embedding_history", deque())  # type: ignore
         if not hist:
             return None, "Unknown", 0.0, None, {}, "unknown"
@@ -281,7 +921,12 @@ class TemporalIdentityManager:
         except Exception as e:
             self.logger.error(f"Error computing prototype from history: {e}", exc_info=True)
             proto_list = []
-        return await self._compute_best_identity(
+        return await self._compute_best_identity(
+            proto_list,
+            location=location,
+            timestamp=timestamp,
+            search_id=search_id,
+        )
 
     async def update(
         self,
@@ -290,6 +935,7 @@ class TemporalIdentityManager:
         eligible_for_recognition: bool,
         location: str = "",
         timestamp: str = "",
+        search_id: Optional[str] = None,
     ) -> Tuple[Optional[str], str, float, Optional[str], Dict[str, Any], str]:
         """
         Update temporal identity state for a track and return a stabilized identity.
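
The search_id threaded through update() and _compute_best_identity() ends up as part of the Redis result key that match_embedding() polls, and a "_hist" variant is used for the history fallback shown further down. A small illustration of that derivation, using made-up frame, track, and deployment values:

    frame_number, track_id = 42, 7                                    # made-up values
    search_id = f"{frame_number}_{track_id}"                          # caller-supplied identifier
    app_dep_id = "64f0example"                                        # resolved via _ensure_app_deployment_id()
    result_key = f"{search_id}_{app_dep_id}"                          # key polled by match_embedding()
    history_search_id = f"{search_id}_hist" if search_id else None    # variant for the history fallback
    print(result_key, history_search_id)                             # 42_7_64f0example 42_7_hist
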
@@ -321,7 +967,7 @@ class TemporalIdentityManager:
         if eligible_for_recognition and emb:
             st8=time.time()
             staff_id, person_name, inst_score, employee_id, staff_details, det_type = await self._compute_best_identity(
-                emb, location=location, timestamp=timestamp
+                emb, location=location, timestamp=timestamp, search_id=search_id
             )
             # print("------------------FACE RECOG TEMPORAL IDENTITY MANAGER UPDATE - COMPUTE BEST IDENTITY_I----------------------------")
             # print("LATENCY:",(time.time() - st8)*1000,"| Throughput fps:",(1.0 / (time.time() - st8)) if (time.time() - st8) > 0 else None)
@@ -386,7 +1032,10 @@ class TemporalIdentityManager:
 
         # Fallback: use prototype from history
         st9=time.time()
-
+        history_search_id = f"{search_id}_hist" if search_id else None
+        fb_staff_id, fb_name, fb_score, fb_employee_id, fb_details, fb_type = await self._compute_best_identity_from_history(
+            s, location=location, timestamp=timestamp, search_id=history_search_id
+        )
         # print("------------------FACE RECOG TEMPORAL IDENTITY MANAGER UPDATE - COMPUTE BEST IDENTITY FROM HISTORY----------------------------")
         # print("LATENCY:",(time.time() - st9)*1000,"| Throughput fps:",(1.0 / (time.time() - st9)) if (time.time() - st9) > 0 else None)
         # print("------------------FACE RECOG TEMPORAL IDENTITY MANAGER UPDATE - COMPUTE BEST IDENTITY FROM HISTORY----------------------------")
@@ -427,9 +1076,9 @@ class FaceRecognitionEmbeddingConfig(BaseConfig):
     smoothing_confidence_range_factor: float = 0.5
 
     # Base confidence threshold (separate from embedding similarity threshold)
-    similarity_threshold: float = 0.
+    similarity_threshold: float = 0.3  # Lowered to match local code - 0.45 was too conservative
     # Base confidence threshold (separate from embedding similarity threshold)
-    confidence_threshold: float = 0.
+    confidence_threshold: float = 0.06  # Detection confidence threshold
 
     # Face recognition optional features
     enable_face_tracking: bool = True  # Enable BYTE TRACKER advanced face tracking -- KEEP IT TRUE ALWAYS
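
The two defaults changed above play different roles: confidence_threshold gates face detections, while similarity_threshold is the value that _compute_best_identity() compares match scores against. A tiny illustration with an invented score:

    similarity_threshold = 0.3    # new default: minimum match score to accept an identity
    confidence_threshold = 0.06   # new default: minimum detector confidence for a face box
    sample_similarity = 0.27      # invented value for illustration only
    print(sample_similarity >= similarity_threshold)   # False -> the detection stays "Unknown"
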
@@ -523,6 +1172,7 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
 
         # Initialize EmbeddingManager - will be configured in process method
         self.embedding_manager = None
+        self.redis_face_matcher = None
         # Temporal identity manager for API-based top-1 identity smoothing
         self.temporal_identity_manager = None
         # Removed lightweight face tracker fallback; we always use AdvancedTracker
@@ -542,7 +1192,7 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
         # Initialization must be done by calling await initialize(config) after instantiation
         # This is handled in PostProcessor._get_use_case_instance()
 
-    async def initialize(self, config: Optional[FaceRecognitionEmbeddingConfig] = None) -> None:
+    async def initialize(self, config: Optional[FaceRecognitionEmbeddingConfig] = None, emb:bool=False) -> None:
         """
         Async initialization method to set up face client and all components.
         Must be called after __init__ before process() can be called.
@@ -563,7 +1213,7 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
 
         Args:
             config: Optional config to use. If not provided, uses config from __init__.
-
+            emb: Optional boolean to indicate if embedding manager should be loaded. If True, embedding manager will be loaded.
         Raises:
             RuntimeError: If embeddings fail to load or verification fails
         """
@@ -584,69 +1234,80 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
             raise TypeError(f"Invalid config type for initialization: {type(init_config)}, expected FaceRecognitionEmbeddingConfig")
 
         self.logger.info("Initializing face recognition use case with provided config")
-
+        # print("=============== STEP 1: INITIALIZING FACE CLIENT ===============")
 
         # Initialize face client (includes deployment update)
         try:
             self.face_client = await self._get_facial_recognition_client(init_config)
-
+            # print(f"=============== FACE CLIENT INITIALIZED: {self.face_client is not None} ===============")
 
             # Initialize People activity logging if enabled
             if init_config.enable_people_activity_logging:
                 self.people_activity_logging = PeopleActivityLogging(self.face_client)
                 # PeopleActivityLogging starts its background thread in __init__
                 self.logger.info("People activity logging enabled and started")
+
+            # Initialize Redis face matcher for fast remote similarity search
+            try:
+                redis_session = getattr(self.face_client, "session", None)
+            except Exception:
+                redis_session = None
+            self.redis_face_matcher = RedisFaceMatcher(
+                session=redis_session,
+                logger=self.logger,
+                face_client=self.face_client,
+            )
 
             # Initialize EmbeddingManager
-
             if not init_config.embedding_config:
-
-
-
-
-
-
-
-
-
-
+
+                init_config.embedding_config = EmbeddingConfig(
+                    similarity_threshold=init_config.similarity_threshold,
+                    confidence_threshold=init_config.confidence_threshold,
+                    enable_track_id_cache=init_config.enable_track_id_cache,
+                    cache_max_size=init_config.cache_max_size,
+                    cache_ttl=3600,
+                    background_refresh_interval=43200,
+                    staff_embeddings_cache_ttl=43200,
+                )
             self.embedding_manager = EmbeddingManager(init_config.embedding_config, self.face_client)
-
+
             self.logger.info("Embedding manager initialized")
+            if emb:
+
+                # Load staff embeddings immediately for fast startup (avoid race conditions)
+                # This MUST succeed before we can proceed - fail fast if it doesn't
+
+                embeddings_loaded = await self.embedding_manager._load_staff_embeddings()
+
+                if not embeddings_loaded:
+                    error_msg = "CRITICAL: Failed to load staff embeddings at initialization - cannot proceed without embeddings"
+                    print(f"=============== {error_msg} ===============")
+                    self.logger.error(error_msg)
+                    raise RuntimeError(error_msg)
+
+                # Verify embeddings are actually loaded using is_ready() method
+                if not self.embedding_manager.is_ready():
+                    status = self.embedding_manager.get_status()
+                    error_msg = f"CRITICAL: Embeddings not ready after load - status: {status}"
+                    print(f"=============== {error_msg} ===============")
+                    self.logger.error(error_msg)
+                    raise RuntimeError(error_msg)
+
+                self.logger.info(f"Successfully loaded {len(self.embedding_manager.staff_embeddings)} staff embeddings at initialization")
 
-
-
-
-
-
-                    print(f"=============== {error_msg} ===============")
-                    self.logger.error(error_msg)
-                    raise RuntimeError(error_msg)
-
-            # Verify embeddings are actually loaded using is_ready() method
-            if not self.embedding_manager.is_ready():
-                status = self.embedding_manager.get_status()
-                error_msg = f"CRITICAL: Embeddings not ready after load - status: {status}"
-                print(f"=============== {error_msg} ===============")
-                self.logger.error(error_msg)
-                raise RuntimeError(error_msg)
-
-            # print(f"=============== STAFF EMBEDDINGS COUNT: {len(self.embedding_manager.staff_embeddings)} ===============")
-            # print(f"=============== EMBEDDINGS MATRIX SHAPE: {self.embedding_manager.embeddings_matrix.shape} ===============")
-            # print(f"=============== EMBEDDINGS LOADED FLAG: {self.embedding_manager._embeddings_loaded} ===============")
-            self.logger.info(f"Successfully loaded {len(self.embedding_manager.staff_embeddings)} staff embeddings at initialization")
-
-            # NOW start background refresh after successful initial load (prevents race conditions)
-            if init_config.embedding_config.enable_background_refresh:
-                # print("=============== STEP 4: STARTING BACKGROUND REFRESH ===============")
-                self.embedding_manager.start_background_refresh()
-                self.logger.info("Background embedding refresh started after successful initial load")
+            # NOW start background refresh after successful initial load (prevents race conditions)
+            if init_config.embedding_config.enable_background_refresh:
+                # print("=============== STEP 4: STARTING BACKGROUND REFRESH ===============")
+                self.embedding_manager.start_background_refresh()
+                self.logger.info("Background embedding refresh started after successful initial load")
 
             # Initialize TemporalIdentityManager with EmbeddingManager for fast local search
             # print("=============== STEP 5: INITIALIZING TEMPORAL IDENTITY MANAGER ===============")
             self.temporal_identity_manager = TemporalIdentityManager(
                 face_client=self.face_client,
                 embedding_manager=self.embedding_manager,
+                redis_matcher=self.redis_face_matcher,
                 recognition_threshold=float(init_config.similarity_threshold),
                 history_size=20,
                 unknown_patience=7,
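
initialize() now takes an emb flag that controls whether staff embeddings are loaded eagerly. A minimal sketch of driving it, assuming an already-constructed use case and config object; only the call itself comes from the diff above:

    async def start_use_case(use_case, config):
        # emb=False (the default) skips the eager staff-embedding load and leaves identity
        # resolution to the Redis matcher; emb=True loads embeddings up front and raises
        # RuntimeError if they cannot be loaded or are not ready.
        await use_case.initialize(config, emb=True)
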
@@ -656,20 +1317,21 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
             self.logger.info("Temporal identity manager initialized with embedding manager for local similarity search")
 
             # Final verification before marking as initialized
-
-            if not self.embedding_manager.is_ready():
-                status = self.embedding_manager.get_status()
-                error_msg = f"CRITICAL: Final verification failed - embeddings not ready. Status: {status}"
-                print(f"=============== {error_msg} ===============")
-                self.logger.error(error_msg)
-                raise RuntimeError(error_msg)
 
-            #
-            status = self.embedding_manager.get_status()
+            # if not self.embedding_manager.is_ready():
+            #     status = self.embedding_manager.get_status()
+            #     error_msg = f"CRITICAL: Final verification failed - embeddings not ready. Status: {status}"
+            #     print(f"=============== {error_msg} ===============")
+            #     self.logger.error(error_msg)
+            #     raise RuntimeError(error_msg)
+
+            # # Log detailed status for debugging
+            # status = self.embedding_manager.get_status()
+
 
             self._initialized = True
             self.logger.info("Face recognition use case fully initialized and verified")
-
+
         except Exception as e:
             self.logger.error(f"Error during use case initialization: {e}", exc_info=True)
             raise RuntimeError(f"Failed to initialize face recognition use case: {e}") from e
@@ -683,21 +1345,46 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
         self.logger.info(
             f"Initializing face recognition client with server ID: {config.facial_recognition_server_id}"
         )
+        print(f"=============== CONFIG: {config} ===============")
+        print(f"=============== CONFIG.SESSION: {config.session} ===============")
+        account_number = os.getenv("MATRICE_ACCOUNT_NUMBER", "")
+        access_key_id = os.getenv("MATRICE_ACCESS_KEY_ID", "")
+        secret_key = os.getenv("MATRICE_SECRET_ACCESS_KEY", "")
+        project_id = os.getenv("MATRICE_PROJECT_ID", "")
+
+        self.session1 = Session(
+            account_number=account_number,
+            access_key=access_key_id,
+            secret_key=secret_key,
+            project_id=project_id,
+        )
         self.face_client = FacialRecognitionClient(
-            server_id=config.facial_recognition_server_id, session=
+            server_id=config.facial_recognition_server_id, session=self.session1
         )
         self.logger.info("Face recognition client initialized")
-
+
         # Call update_deployment if deployment_id is provided
         if config.deployment_id:
             try:
-
-
-
-
+                # Create temporary RedisFaceMatcher to get app_deployment_id using verified method
+                redis_session = getattr(self.face_client, "session", None) or config.session
+                temp_redis_matcher = RedisFaceMatcher(
+                    session=redis_session,
+                    logger=self.logger,
+                    face_client=self.face_client,
+                )
+                app_deployment_id = await temp_redis_matcher._ensure_app_deployment_id()
+
+                if app_deployment_id:
+                    self.logger.info(f"Updating deployment action with app_deployment_id: {app_deployment_id}")
+                    response = await self.face_client.update_deployment_action(app_deployment_id)
+                    if response:
+                        self.logger.info(f"Successfully updated deployment action {app_deployment_id}")
+                    else:
+                        self.logger.warning(f"Failed to update deployment: {response.get('error', 'Unknown error')}")
                 else:
-                    self.logger.warning(
-
+                    self.logger.warning("Could not resolve app_deployment_id, skipping deployment action update")
+
                 self.logger.info(f"Updating deployment with ID: {config.deployment_id}")
                 response = await self.face_client.update_deployment(config.deployment_id)
                 if response:
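
_get_facial_recognition_client() now builds its Session from environment credentials rather than the config-provided session. A hedged sketch of supplying those variables before initialization; the values below are placeholders, not defaults from the package:

    import os

    os.environ["MATRICE_ACCOUNT_NUMBER"] = "<account-number>"
    os.environ["MATRICE_ACCESS_KEY_ID"] = "<access-key-id>"
    os.environ["MATRICE_SECRET_ACCESS_KEY"] = "<secret-access-key>"
    os.environ["MATRICE_PROJECT_ID"] = "<project-id>"   # read with a default of "" if unset
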
@@ -818,9 +1505,7 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
             )
             self.logger.debug("Applied category filtering")
 
-
-        # print(self._initialized,"LATENCY:",(time.time() - processing_start)*1000,"| Throughput fps:",(1.0 / (time.time() - processing_start)) if (time.time() - processing_start) > 0 else None)
-        # print("------------------TILL TRACKER MS----------------------------")
+
         # Advanced tracking (BYTETracker-like) - only if enabled
         if config.enable_face_tracking:
             from ..advanced_tracker import AdvancedTracker
@@ -860,24 +1545,21 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
         recognized_persons = {}
         current_frame_staff_details = {}
 
-        # print("------------------TRACKER INIT END----------------------------")
-        # print("LATENCY:",(time.time() - processing_start)*1000,"| Throughput fps:",(1.0 / (time.time() - processing_start)) if (time.time() - processing_start) > 0 else None)
-        # print("------------------TRACKER INIT END----------------------------")
 
         # Process face recognition for each detection (if enabled)
         if config.enable_face_recognition:
             # Additional safety check: verify embeddings are still loaded and ready
-            if not self.embedding_manager or not self.embedding_manager.is_ready():
-
-
-
-
-
-
-
-
-
-
+            # if not self.embedding_manager or not self.embedding_manager.is_ready():
+            #     status = self.embedding_manager.get_status() if self.embedding_manager else {}
+            #     error_msg = f"CRITICAL: Cannot process face recognition - embeddings not ready. Status: {status}"
+            #     self.logger.error(error_msg)
+            #     print(f"ERROR: {error_msg}")
+            #     return self.create_error_result(
+            #         error_msg,
+            #         usecase=self.name,
+            #         category=self.category,
+            #         context=context,
+            #     )
 
             face_recognition_result = await self._process_face_recognition(
                 processed_data, config, stream_info, input_bytes
@@ -891,9 +1573,6 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
                detection["recognition_status"] = "disabled"
                detection["enrolled"] = False

-        # print("------------------FACE RECONG CONFIG ENABLED----------------------------")
-        # print("LATENCY:",(time.time() - processing_start)*1000,"| Throughput fps:",(1.0 / (time.time() - processing_start)) if (time.time() - processing_start) > 0 else None)
-        # print("------------------FACE RECONG CONFIG ENABLED----------------------------")

        # Update tracking state for total count per label
        self._update_tracking_state(processed_data)
@@ -927,9 +1606,6 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
            current_recognized_count, current_unknown_count, recognized_persons
        ))

-        # print("------------------TILL FACE RECOG SUMMARY----------------------------")
-        # print("LATENCY:",(time.time() - processing_start)*1000,"| Throughput fps:",(1.0 / (time.time() - processing_start)) if (time.time() - processing_start) > 0 else None)
-        # print("------------------TILL FACE RECOG SUMMARY----------------------------")

        # Add detections to the counting summary (standard pattern for detection use cases)
        # Ensure display label is present for UI (does not affect logic/counters)
@@ -962,10 +1638,6 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
        )
        summary = summary_list[0] if summary_list else {}

-        # print("------------------TILL TRACKING STATS----------------------------")
-        # print("LATENCY:",(time.time() - processing_start)*1000,"| Throughput fps:",(1.0 / (time.time() - processing_start)) if (time.time() - processing_start) > 0 else None)
-        # print("------------------TILL TRACKING STATS----------------------------")
-

        agg_summary = {
            str(frame_number): {
@@ -1039,6 +1711,12 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):

        return processed_data

+    def _build_search_id(self, track_key: Any, frame_id: Optional[Any]) -> str:
+        """Generate a deterministic Redis search identifier per detection."""
+        base_frame = frame_id if frame_id is not None else self._total_frame_counter
+        safe_track = str(track_key if track_key is not None else "na").replace(" ", "_")
+        return f"face_{base_frame}_{safe_track}"
+
    def _extract_frame_from_data(self, input_bytes: bytes) -> Optional[np.ndarray]:
        """
        Extract frame from original model data
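
A standalone sketch of the identifier scheme the new _build_search_id method implements. The function below is a local re-implementation for illustration (the real method reads self._total_frame_counter); example inputs are made up:

    def build_search_id(track_key, frame_id, total_frame_counter=0):
        # Mirrors _build_search_id: fall back to the frame counter and "na",
        # and replace spaces so the id stays a single token.
        base_frame = frame_id if frame_id is not None else total_frame_counter
        safe_track = str(track_key if track_key is not None else "na").replace(" ", "_")
        return f"face_{base_frame}_{safe_track}"

    print(build_search_id(42, 1867))    # -> face_1867_42
    print(build_search_id(None, None))  # -> face_0_na
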
@@ -1184,6 +1862,9 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
            h_box = max(1, y2 - y1)
            frame_id = detection.get("frame_id", None) #TODO: Maybe replace this with stream_info frame_id

+            track_key = track_id if track_id is not None else f"no_track_{id(detection)}"
+            search_id = self._build_search_id(track_key, frame_id)
+
            # Track probation age strictly by internal tracker id
            if track_id is not None:
                if track_id not in self._track_first_seen:
@@ -1207,7 +1888,6 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
            detection_type = "unknown"
            try:
                if self.temporal_identity_manager:
-                    track_key = track_id if track_id is not None else f"no_track_{id(detection)}"
                    if not eligible_for_recognition:
                        # Mirror compare_similarity: when not eligible, keep stable label if present
                        s = self.temporal_identity_manager.tracks.get(track_key, {})
@@ -1243,6 +1923,7 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
                        eligible_for_recognition=True,
                        location=location,
                        timestamp=current_timestamp,
+                        search_id=search_id,
                    )
                    # print("------------------FACE RECOG TEMPORAL IDENTITY MANAGER UPDATE----------------------------")
                    # print("LATENCY:",(time.time() - st3)*1000,"| Throughput fps:",(1.0 / (time.time() - st3)) if (time.time() - st3) > 0 else None)
@@ -1667,7 +2348,7 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
        )


-        human_text_lines = [f"CURRENT FRAME @ {current_timestamp}"]
+        human_text_lines = [f"CURRENT FRAME @ {current_timestamp}:"]

        current_recognized = current_frame.get("recognized", 0)
        current_unknown = current_frame.get("unknown", 0)
@@ -1675,8 +2356,8 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
        total_current = current_recognized + current_unknown

        # Show staff names and IDs being recognized in current frame (with tabs)
-        human_text_lines.append(f"\
-        human_text_lines.append(f"\
+        human_text_lines.append(f"\t- Current Total Faces: {total_current}")
+        human_text_lines.append(f"\t- Current Recognized: {current_recognized}")

        if recognized_persons:
            for person_id in recognized_persons.keys():
@@ -1684,15 +2365,15 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):
                staff_name = (current_frame_staff_details or {}).get(
                    person_id, f"Staff {person_id}"
                )
-                human_text_lines.append(f"\
-        human_text_lines.append(f"\
+                human_text_lines.append(f"\t\t- Name: {staff_name} (ID: {person_id})")
+        human_text_lines.append(f"\t- Current Unknown: {current_unknown}")

        # Show current frame counts only (with tabs)
        human_text_lines.append("")
-        human_text_lines.append(f"TOTAL SINCE @ {start_timestamp}")
-        human_text_lines.append(f"\tTotal Faces: {cumulative_total}")
-        human_text_lines.append(f"\tRecognized: {face_summary.get('session_totals',{}).get('total_recognized', 0)}")
-        human_text_lines.append(f"\tUnknown: {face_summary.get('session_totals',{}).get('total_unknown', 0)}")
+        # human_text_lines.append(f"TOTAL SINCE @ {start_timestamp}")
+        # human_text_lines.append(f"\tTotal Faces: {cumulative_total}")
+        # human_text_lines.append(f"\tRecognized: {face_summary.get('session_totals',{}).get('total_recognized', 0)}")
+        # human_text_lines.append(f"\tUnknown: {face_summary.get('session_totals',{}).get('total_unknown', 0)}")
        # Additional counts similar to compare_similarity HUD
        # try:
        #     human_text_lines.append(f"\tCurrent Faces (detections): {total_detections}")
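
For reference, a rough sketch of the per-frame summary text these updated lines now build. The timestamp, counts, and staff mapping below are invented sample values; only the format strings follow the added code:

    current_timestamp = "2025-01-01 10:00:00 UTC"   # sample values only
    total_current, current_recognized, current_unknown = 3, 2, 1
    recognized = {"S1": "Alice", "S2": "Bob"}

    lines = [f"CURRENT FRAME @ {current_timestamp}:"]
    lines.append(f"\t- Current Total Faces: {total_current}")
    lines.append(f"\t- Current Recognized: {current_recognized}")
    for person_id, staff_name in recognized.items():
        lines.append(f"\t\t- Name: {staff_name} (ID: {person_id})")
    lines.append(f"\t- Current Unknown: {current_unknown}")
    print("\n".join(lines))
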
@@ -1702,15 +2383,15 @@ class FaceRecognitionEmbeddingUseCase(BaseProcessor):

        human_text = "\n".join(human_text_lines)

-        if alerts:
-
-
-
-
-        else:
-
+        # if alerts:
+        #     for alert in alerts:
+        #         human_text_lines.append(
+        #             f"Alerts: {alert.get('settings', {})} sent @ {current_timestamp}"
+        #         )
+        # else:
+        #     human_text_lines.append("Alerts: None")

-        human_text = "\n".join(human_text_lines)
+        # human_text = "\n".join(human_text_lines)
        reset_settings = [
            {"interval_type": "daily", "reset_time": {"value": 9, "time_unit": "hour"}}
        ]
|