matrice-streaming 0.1.14__py3-none-any.whl → 0.1.65__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- matrice_streaming/__init__.py +44 -32
- matrice_streaming/streaming_gateway/camera_streamer/__init__.py +68 -1
- matrice_streaming/streaming_gateway/camera_streamer/async_camera_worker.py +1388 -0
- matrice_streaming/streaming_gateway/camera_streamer/async_ffmpeg_worker.py +966 -0
- matrice_streaming/streaming_gateway/camera_streamer/camera_streamer.py +188 -24
- matrice_streaming/streaming_gateway/camera_streamer/device_detection.py +507 -0
- matrice_streaming/streaming_gateway/camera_streamer/encoding_pool_manager.py +136 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_camera_streamer.py +1048 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_config.py +192 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_worker_manager.py +470 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_camera_streamer.py +1368 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_worker.py +1063 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_worker_manager.py +546 -0
- matrice_streaming/streaming_gateway/camera_streamer/message_builder.py +60 -15
- matrice_streaming/streaming_gateway/camera_streamer/nvdec.py +1330 -0
- matrice_streaming/streaming_gateway/camera_streamer/nvdec_worker_manager.py +412 -0
- matrice_streaming/streaming_gateway/camera_streamer/platform_pipelines.py +680 -0
- matrice_streaming/streaming_gateway/camera_streamer/stream_statistics.py +111 -4
- matrice_streaming/streaming_gateway/camera_streamer/video_capture_manager.py +223 -27
- matrice_streaming/streaming_gateway/camera_streamer/worker_manager.py +694 -0
- matrice_streaming/streaming_gateway/debug/__init__.py +27 -2
- matrice_streaming/streaming_gateway/debug/benchmark.py +727 -0
- matrice_streaming/streaming_gateway/debug/debug_gstreamer_gateway.py +599 -0
- matrice_streaming/streaming_gateway/debug/debug_streaming_gateway.py +245 -95
- matrice_streaming/streaming_gateway/debug/debug_utils.py +29 -0
- matrice_streaming/streaming_gateway/debug/test_videoplayback.py +318 -0
- matrice_streaming/streaming_gateway/dynamic_camera_manager.py +656 -39
- matrice_streaming/streaming_gateway/metrics_reporter.py +676 -139
- matrice_streaming/streaming_gateway/streaming_action.py +71 -20
- matrice_streaming/streaming_gateway/streaming_gateway.py +1026 -78
- matrice_streaming/streaming_gateway/streaming_gateway_utils.py +175 -20
- matrice_streaming/streaming_gateway/streaming_status_listener.py +89 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/METADATA +1 -1
- matrice_streaming-0.1.65.dist-info/RECORD +56 -0
- matrice_streaming-0.1.14.dist-info/RECORD +0 -38
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/WHEEL +0 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/licenses/LICENSE.txt +0 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/top_level.txt +0 -0
matrice_streaming/streaming_gateway/camera_streamer/async_ffmpeg_worker.py (new file, +966 lines)
@@ -0,0 +1,966 @@
"""Async FFmpeg worker process for handling multiple cameras concurrently.

This module implements an async event loop worker that uses FFmpeg subprocess
pipelines for video ingestion. This provides better performance than OpenCV
by isolating decoder threads from the Python GIL.
"""
import asyncio
import logging
import time
import multiprocessing
import os
import sys
import signal
import subprocess
from typing import Dict, Any, Optional, List, Tuple
from concurrent.futures import ThreadPoolExecutor
from collections import deque

import numpy as np
import cv2
import psutil

from matrice_common.optimize import FrameOptimizer

from .ffmpeg_config import FFmpegConfig, is_ffmpeg_available
from .ffmpeg_camera_streamer import FFmpegPipeline


# Disable threading in numerical libraries
os.environ["OMP_NUM_THREADS"] = "1"
os.environ["OPENBLAS_NUM_THREADS"] = "1"
os.environ["MKL_NUM_THREADS"] = "1"
os.environ["NUMEXPR_NUM_THREADS"] = "1"


def pin_to_cores(worker_id: int, total_workers: int) -> Optional[List[int]]:
    """Pin worker process to specific CPU cores for cache locality.

    Args:
        worker_id: Worker identifier (0-indexed)
        total_workers: Total number of worker processes

    Returns:
        List of CPU core indices this worker is pinned to, or None if pinning failed
    """
    try:
        p = psutil.Process()
        cpu_count = psutil.cpu_count(logical=True)
        cores_per_worker = max(1, cpu_count // total_workers)

        start_core = worker_id * cores_per_worker
        end_core = min(start_core + cores_per_worker, cpu_count)

        core_list = list(range(start_core, end_core))
        if core_list:
            p.cpu_affinity(core_list)
            return core_list
    except Exception:
        pass
    return None


class AsyncFFmpegWorker:
    """Async worker process that handles multiple cameras using FFmpeg pipelines.

    This worker runs an async event loop to handle I/O-bound operations
    for multiple cameras efficiently, using FFmpeg subprocesses for video
    decoding instead of OpenCV.
    """

    def __init__(
        self,
        worker_id: int,
        camera_configs: List[Dict[str, Any]],
        stream_config: Dict[str, Any],
        stop_event: multiprocessing.Event,
        health_queue: multiprocessing.Queue,
        command_queue: Optional[multiprocessing.Queue] = None,
        response_queue: Optional[multiprocessing.Queue] = None,
        ffmpeg_config: Optional[FFmpegConfig] = None,
        # SHM mode options
        use_shm: bool = False,
        shm_slot_count: int = 1000,
        shm_frame_format: str = "BGR",
        # Performance options
        pin_cpu_affinity: bool = True,
        total_workers: int = 1,
        # Frame optimizer options
        frame_optimizer_enabled: bool = True,
        frame_optimizer_config: Optional[Dict[str, Any]] = None,
    ):
        """Initialize async FFmpeg worker.

        Args:
            worker_id: Unique identifier for this worker
            camera_configs: List of camera configurations to handle
            stream_config: Streaming configuration (Redis, Kafka, etc.)
            stop_event: Event to signal worker shutdown
            health_queue: Queue for reporting health status
            command_queue: Queue for receiving dynamic camera commands
            response_queue: Queue for sending command responses
            ffmpeg_config: FFmpeg configuration options
            use_shm: Enable SHM mode for raw frame sharing
            shm_slot_count: Number of frame slots per camera ring buffer
            shm_frame_format: Frame format for SHM storage
            pin_cpu_affinity: Pin worker to specific CPU cores
            total_workers: Total number of workers for CPU affinity calculation
            frame_optimizer_enabled: Enable frame optimizer for skipping similar frames
            frame_optimizer_config: Frame optimizer configuration dict
        """
        self.worker_id = worker_id
        self.camera_configs = camera_configs
        self.stream_config = stream_config
        self.stop_event = stop_event
        self.health_queue = health_queue
        self.command_queue = command_queue
        self.response_queue = response_queue
        self.ffmpeg_config = ffmpeg_config or FFmpegConfig()

        # Setup logging
        self.logger = logging.getLogger(f"AsyncFFmpegWorker-{worker_id}")
        self.logger.info(f"Initializing FFmpeg worker {worker_id} with {len(camera_configs)} cameras")

        # Track camera tasks and pipelines
        self.camera_tasks: Dict[str, asyncio.Task] = {}
        self.pipelines: Dict[str, FFmpegPipeline] = {}

        # Redis/stream client
        self.redis_client = None

        # SHM configuration
        self.use_shm = use_shm
        self.shm_slot_count = shm_slot_count
        self.shm_frame_format = shm_frame_format
        self._shm_buffers: Dict[str, Any] = {}
        self._last_shm_frame_idx: Dict[str, int] = {}

        # Initialize frame optimizer for skipping similar frames
        frame_optimizer_config = frame_optimizer_config or {}
        self.frame_optimizer = FrameOptimizer(
            enabled=frame_optimizer_enabled,
            scale=frame_optimizer_config.get("scale", 0.4),
            diff_threshold=frame_optimizer_config.get("diff_threshold", 15),
            similarity_threshold=frame_optimizer_config.get("similarity_threshold", 0.05),
            bg_update_interval=frame_optimizer_config.get("bg_update_interval", 10),
        )
        self._last_sent_frame_ids: Dict[str, str] = {}  # stream_key -> last sent frame_id

        # Register atexit handler for SHM cleanup
        if use_shm:
            import atexit
            atexit.register(self._cleanup_shm_on_exit)

        # CPU affinity
        self.pin_cpu_affinity = pin_cpu_affinity
        self.total_workers = total_workers
        self.pinned_cores: Optional[List[int]] = None

        # Apply CPU affinity
        if pin_cpu_affinity:
            self.pinned_cores = pin_to_cores(worker_id, total_workers)
            if self.pinned_cores:
                self.logger.info(
                    f"Worker {worker_id}: CPU affinity pinned to cores "
                    f"{self.pinned_cores[0]}-{self.pinned_cores[-1]}"
                )

        # Thread pool for blocking FFmpeg operations
        num_cameras = len(camera_configs)
        num_threads = min(64, max(8, num_cameras))
        self.executor = ThreadPoolExecutor(max_workers=num_threads)

        # Metrics
        self._encoding_times: deque = deque(maxlen=100)
        self._frame_times: deque = deque(maxlen=100)
        self._frames_encoded = 0
        self._encoding_errors = 0
        self._last_metrics_log = time.time()
        self._process_info = psutil.Process(os.getpid())

        # Per-camera metrics for periodic FPS logging
        self._metrics_log_interval = 30.0  # Log metrics every 30 seconds
        self._frames_per_camera: Dict[str, int] = {}
        self._last_fps_check_time = time.time()

        self.logger.info(
            f"Worker {worker_id}: FFmpeg worker initialized with {num_threads} threads"
        )

    async def initialize(self):
        """Initialize async resources (Redis client, etc.)."""
        try:
            from matrice_common.stream import MatriceStream, StreamType

            self.stream = MatriceStream(
                stream_type=StreamType.REDIS,
                enable_shm_batching=True,
                **self.stream_config
            )

            self.redis_client = self.stream.async_client
            await self.redis_client.setup_client()

            self.logger.info(f"Worker {self.worker_id}: Initialized async Redis client")
        except Exception as e:
            self.logger.error(f"Worker {self.worker_id}: Failed to initialize: {e}")
            raise

    def _cleanup_shm_on_exit(self):
        """Cleanup SHM buffers on exit."""
        for camera_id, shm_buffer in list(self._shm_buffers.items()):
            try:
                shm_buffer.close()
            except Exception:
                pass

    def _get_or_create_shm_buffer(self, camera_id: str, width: int, height: int):
        """Get existing or create new SHM buffer for camera.

        Args:
            camera_id: Camera stream key
            width: Frame width
            height: Frame height

        Returns:
            ShmRingBuffer instance for this camera
        """
        if camera_id not in self._shm_buffers:
            from matrice_common.stream.shm_ring_buffer import ShmRingBuffer
            format_map = {
                "BGR": ShmRingBuffer.FORMAT_BGR,
                "RGB": ShmRingBuffer.FORMAT_RGB,
                "NV12": ShmRingBuffer.FORMAT_NV12
            }
            frame_format = format_map.get(self.shm_frame_format, ShmRingBuffer.FORMAT_BGR)
            self._shm_buffers[camera_id] = ShmRingBuffer(
                camera_id=camera_id,
                width=width,
                height=height,
                frame_format=frame_format,
                slot_count=self.shm_slot_count,
                create=True
            )
            self.logger.info(
                f"Worker {self.worker_id}: Created SHM buffer for {camera_id} - "
                f"{width}x{height} @ {self.shm_frame_format}, {self.shm_slot_count} slots"
            )
        return self._shm_buffers[camera_id]

    async def run(self):
        """Main worker loop - starts async tasks for all cameras."""
        try:
            await self.initialize()

            # Start camera tasks
            for camera_config in self.camera_configs:
                await self._add_camera_internal(camera_config)

            # Report initial health
            self._report_health("running", len(self.camera_tasks))

            # Start command handler if queue provided
            command_task = None
            if self.command_queue:
                command_task = asyncio.create_task(
                    self._command_handler(),
                    name="command-handler"
                )

            # Monitor loop
            while not self.stop_event.is_set():
                # Check for completed/failed tasks
                for stream_key, task in list(self.camera_tasks.items()):
                    if task.done():
                        try:
                            task.result()
                        except Exception as e:
                            self.logger.error(f"Camera {stream_key} task failed: {e}")
                        del self.camera_tasks[stream_key]

                self._report_health("running", len(self.camera_tasks))

                # Log metrics periodically
                if time.time() - self._last_metrics_log > self._metrics_log_interval:
                    self._log_metrics()

                await asyncio.sleep(1.0)

            # Shutdown
            self.logger.info(f"Worker {self.worker_id}: Stop event detected, shutting down...")

            if command_task and not command_task.done():
                command_task.cancel()
                try:
                    await command_task
                except asyncio.CancelledError:
                    pass

            await self._shutdown()

        except Exception as e:
            self.logger.error(f"Worker {self.worker_id}: Fatal error: {e}", exc_info=True)
            self._report_health("error", error=str(e))
            raise

    async def _camera_handler(self, camera_config: Dict[str, Any]):
        """Handle a single camera with FFmpeg pipeline.

        Args:
            camera_config: Camera configuration dictionary
        """
        stream_key = camera_config['stream_key']
        stream_group_key = camera_config.get('stream_group_key', 'default')
        source = camera_config['source']
        topic = camera_config['topic']
        fps = camera_config.get('fps', 30)
        quality = camera_config.get('quality', 90)
        width = camera_config.get('width', 0)
        height = camera_config.get('height', 0)
        camera_location = camera_config.get('camera_location', 'Unknown')
        simulate_video_file_stream = camera_config.get('simulate_video_file_stream', False)

        # Retry settings
        MIN_RETRY_COOLDOWN = 5
        MAX_RETRY_COOLDOWN = 30
        retry_cycle = 0
        max_consecutive_failures = 10

        # Create FFmpeg config for this camera
        cam_config = FFmpegConfig(
            hwaccel=self.ffmpeg_config.hwaccel,
            pixel_format=self.ffmpeg_config.pixel_format,
            low_latency=self.ffmpeg_config.low_latency,
            loop=simulate_video_file_stream,
            output_width=width,
            output_height=height,
        )

        while not self.stop_event.is_set():
            pipeline = None
            consecutive_failures = 0
            frame_counter = 0

            try:
                # Create FFmpeg pipeline
                pipeline = FFmpegPipeline(
                    source=str(source),
                    width=width,
                    height=height,
                    config=cam_config,
                    stream_key=stream_key,
                )
                self.pipelines[stream_key] = pipeline

                retry_cycle = 0
                self.logger.info(
                    f"Worker {self.worker_id}: Camera {stream_key} connected via FFmpeg - "
                    f"{pipeline.width}x{pipeline.height} @ {fps} FPS"
                )

                # Frame processing loop
                while not self.stop_event.is_set():
                    try:
                        read_start = time.time()

                        # Read frame from FFmpeg pipeline (async)
                        frame = await pipeline.read_frame_async(self.executor)
                        read_time = time.time() - read_start

                        if frame is None:
                            consecutive_failures += 1
                            if consecutive_failures >= max_consecutive_failures:
                                self.logger.warning(
                                    f"Worker {self.worker_id}: Camera {stream_key} - "
                                    f"{max_consecutive_failures} consecutive failures, reconnecting..."
                                )
                                break
                            await asyncio.sleep(0.1)
                            continue

                        consecutive_failures = 0
                        frame_counter += 1

                        # Process and send frame (SHM mode vs JPEG mode)
                        if self.use_shm:
                            await self._process_frame_shm_mode(
                                frame=frame,
                                stream_key=stream_key,
                                stream_group_key=stream_group_key,
                                topic=topic,
                                width=pipeline.width,
                                height=pipeline.height,
                                frame_counter=frame_counter,
                                camera_location=camera_location,
                                read_time=read_time,
                            )
                        else:
                            await self._process_and_send_frame(
                                frame=frame,
                                stream_key=stream_key,
                                stream_group_key=stream_group_key,
                                topic=topic,
                                width=pipeline.width,
                                height=pipeline.height,
                                quality=quality,
                                frame_counter=frame_counter,
                                camera_location=camera_location,
                                read_time=read_time,
                            )

                        # Maintain target FPS
                        frame_interval = 1.0 / fps
                        elapsed = time.time() - read_start
                        sleep_time = max(0, frame_interval - elapsed)
                        if sleep_time > 0:
                            await asyncio.sleep(sleep_time)

                    except asyncio.CancelledError:
                        self.logger.info(f"Camera {stream_key} task cancelled")
                        return
                    except Exception as e:
                        self.logger.error(f"Error processing camera {stream_key}: {e}")
                        consecutive_failures += 1
                        if consecutive_failures >= max_consecutive_failures:
                            break
                        await asyncio.sleep(1.0)

            except asyncio.CancelledError:
                return
            except Exception as e:
                self.logger.error(f"Camera {stream_key} connection error: {e}")
            finally:
                if pipeline:
                    pipeline.close()
                    if stream_key in self.pipelines:
                        del self.pipelines[stream_key]

            if self.stop_event.is_set():
                break

            # Exponential backoff before retry
            cooldown = min(MAX_RETRY_COOLDOWN, MIN_RETRY_COOLDOWN + retry_cycle)
            self.logger.info(f"Retrying camera {stream_key} in {cooldown}s")
            await asyncio.sleep(cooldown)
            retry_cycle += 1

    async def _process_and_send_frame(
        self,
        frame: np.ndarray,
        stream_key: str,
        stream_group_key: str,
        topic: str,
        width: int,
        height: int,
        quality: int,
        frame_counter: int,
        camera_location: str,
        read_time: float,
    ):
        """Process frame and send to Redis.

        Args:
            frame: Raw frame from FFmpeg
            stream_key: Stream identifier
            stream_group_key: Stream group identifier
            topic: Redis topic
            width: Frame width
            height: Frame height
            quality: JPEG quality
            frame_counter: Current frame number
            camera_location: Camera location
            read_time: Time taken to read frame
        """
        frame_start = time.time()

        # Check frame similarity BEFORE encoding (saves CPU if frame is similar)
        is_similar, similarity_score = self.frame_optimizer.is_similar(frame, stream_key)
        reference_frame_id = self._last_sent_frame_ids.get(stream_key)

        import uuid

        if is_similar and reference_frame_id:
            # Frame is similar - send message with empty content + cached_frame_id
            message = {
                "frame_id": str(uuid.uuid4()),
                "input_name": stream_key,
                "input_stream": {
                    "content": b"",  # EMPTY content for cached frame
                    "metadata": {
                        "width": width,
                        "height": height,
                        "frame_count": frame_counter,
                        "camera_location": camera_location,
                        "stream_group_key": stream_group_key,
                        "encoding_type": "cached",
                        "codec": "cached",
                        "feed_type": "ffmpeg",
                        "timestamp": time.time(),
                        "similarity_score": similarity_score,
                        "cached_frame_id": reference_frame_id,
                    },
                },
            }

            # Send to Redis
            write_start = time.time()
            await self.redis_client.add_message(topic, message)
            write_time = time.time() - write_start

            # Track metrics (no encoding)
            self._frames_per_camera[stream_key] = self._frames_per_camera.get(stream_key, 0) + 1
            total_time = time.time() - frame_start
            self._frame_times.append(total_time)
            return

        # Frame is different - encode and send full frame
        encode_start = time.time()
        success, jpeg_buffer = cv2.imencode(
            '.jpg', frame, [int(cv2.IMWRITE_JPEG_QUALITY), quality]
        )
        encode_time = time.time() - encode_start

        if not success:
            self._encoding_errors += 1
            return

        self._frames_encoded += 1
        self._frames_per_camera[stream_key] = self._frames_per_camera.get(stream_key, 0) + 1
        self._encoding_times.append(encode_time)

        frame_data = bytes(jpeg_buffer)

        # Build message
        new_frame_id = str(uuid.uuid4())
        message = {
            "frame_id": new_frame_id,
            "input_name": stream_key,
            "input_stream": {
                "content": frame_data,
                "metadata": {
                    "width": width,
                    "height": height,
                    "frame_count": frame_counter,
                    "camera_location": camera_location,
                    "stream_group_key": stream_group_key,
                    "encoding_type": "jpeg",
                    "codec": "h264",
                    "feed_type": "ffmpeg",
                    "timestamp": time.time(),
                },
            },
        }

        # Send to Redis
        write_start = time.time()
        await self.redis_client.add_message(topic, message)
        write_time = time.time() - write_start

        # Track this frame_id as the last sent for future reference frames
        self._last_sent_frame_ids[stream_key] = new_frame_id
        self.frame_optimizer.set_last_frame_id(stream_key, new_frame_id)

        # Track metrics
        total_time = time.time() - frame_start
        self._frame_times.append(total_time)

    async def _process_frame_shm_mode(
        self,
        frame: np.ndarray,
        stream_key: str,
        stream_group_key: str,
        topic: str,
        width: int,
        height: int,
        frame_counter: int,
        camera_location: str,
        read_time: float,
    ):
        """SHM_MODE: Write raw frame to SHM, send metadata to Redis.

        Args:
            frame: Raw frame from FFmpeg (BGR format)
            stream_key: Stream identifier
            stream_group_key: Stream group identifier
            topic: Redis topic
            width: Frame width
            height: Frame height
            frame_counter: Current frame number
            camera_location: Camera location
            read_time: Time taken to read frame
        """
        frame_start = time.time()

        # Check frame similarity BEFORE writing to SHM (saves SHM writes for static scenes)
        is_similar, similarity_score = self.frame_optimizer.is_similar(frame, stream_key)
        reference_frame_idx = self._last_shm_frame_idx.get(stream_key)

        if is_similar and reference_frame_idx is not None:
            # Frame is similar - send metadata with reference to previous frame
            ts_ns = int(time.time() * 1e9)
            shm_buffer = self._shm_buffers.get(stream_key)

            await self.redis_client.add_shm_metadata(
                stream_name=topic,
                cam_id=stream_key,
                shm_name=shm_buffer.shm_name if shm_buffer else "",
                frame_idx=reference_frame_idx,  # Reference to cached frame
                slot=None,  # No new slot written
                ts_ns=ts_ns,
                width=width,
                height=height,
                format=self.shm_frame_format,
                is_similar=True,
                reference_frame_idx=reference_frame_idx,
                similarity_score=similarity_score,
                stream_group_key=stream_group_key,
                camera_location=camera_location,
                frame_counter=frame_counter,
            )

            # Track metrics (no SHM write)
            self._frames_per_camera[stream_key] = self._frames_per_camera.get(stream_key, 0) + 1
            total_time = time.time() - frame_start
            self._frame_times.append(total_time)
            return

        # Frame is different - write to SHM
        # Get or create SHM buffer
        shm_buffer = self._get_or_create_shm_buffer(stream_key, width, height)

        # Convert frame to target format if needed
        if self.shm_frame_format == "RGB":
            raw_bytes = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB).tobytes()
        elif self.shm_frame_format == "NV12":
            from matrice_common.stream.shm_ring_buffer import bgr_to_nv12
            raw_bytes = bgr_to_nv12(frame)
        else:  # BGR default
            raw_bytes = frame.tobytes()

        # Write to SHM
        frame_idx, slot = shm_buffer.write_frame(raw_bytes)
        self._last_shm_frame_idx[stream_key] = frame_idx

        # Send metadata to Redis
        ts_ns = int(time.time() * 1e9)
        await self.redis_client.add_shm_metadata(
            stream_name=topic,
            cam_id=stream_key,
            shm_name=shm_buffer.shm_name,
            frame_idx=frame_idx,
            slot=slot,
            ts_ns=ts_ns,
            width=width,
            height=height,
            format=self.shm_frame_format,
            is_similar=False,
            stream_group_key=stream_group_key,
            camera_location=camera_location,
            frame_counter=frame_counter,
        )

        # Track metrics
        self._frames_encoded += 1
        self._frames_per_camera[stream_key] = self._frames_per_camera.get(stream_key, 0) + 1
        total_time = time.time() - frame_start
        self._frame_times.append(total_time)

    async def _command_handler(self):
        """Process commands from the manager."""
        while not self.stop_event.is_set():
            try:
                command = await asyncio.get_event_loop().run_in_executor(
                    None, self._get_command_nonblocking
                )
                if command:
                    await self._process_command(command)
                else:
                    await asyncio.sleep(0.1)
            except asyncio.CancelledError:
                break
            except Exception as e:
                self.logger.error(f"Error in command handler: {e}")
                await asyncio.sleep(1.0)

    def _get_command_nonblocking(self):
        try:
            return self.command_queue.get_nowait()
        except Exception:
            return None

    async def _process_command(self, command: Dict[str, Any]):
        """Process a command."""
        cmd_type = command.get('type')
        self.logger.info(f"Processing command: {cmd_type}")

        try:
            if cmd_type == 'add_camera':
                camera_config = command.get('camera_config')
                success = await self._add_camera_internal(camera_config)
                self._send_response(cmd_type, camera_config.get('stream_key'), success)
            elif cmd_type == 'remove_camera':
                stream_key = command.get('stream_key')
                success = await self._remove_camera_internal(stream_key)
                self._send_response(cmd_type, stream_key, success)
            elif cmd_type == 'update_camera':
                camera_config = command.get('camera_config')
                stream_key = command.get('stream_key')
                await self._remove_camera_internal(stream_key)
                success = await self._add_camera_internal(camera_config)
                self._send_response(cmd_type, stream_key, success)
        except Exception as e:
            self.logger.error(f"Error processing command {cmd_type}: {e}")
            self._send_response(cmd_type, command.get('stream_key'), False, str(e))

    async def _add_camera_internal(self, camera_config: Dict[str, Any]) -> bool:
        """Add a camera and start its streaming task."""
        stream_key = camera_config.get('stream_key')
        if not stream_key:
            return False

        if stream_key in self.camera_tasks:
            self.logger.warning(f"Camera {stream_key} already exists")
            return False

        try:
            task = asyncio.create_task(
                self._camera_handler(camera_config),
                name=f"ffmpeg-camera-{stream_key}"
            )
            self.camera_tasks[stream_key] = task
            self.logger.info(f"Added FFmpeg camera {stream_key}")
            return True
        except Exception as e:
            self.logger.error(f"Failed to add camera {stream_key}: {e}")
            return False

    async def _remove_camera_internal(self, stream_key: str) -> bool:
        """Remove a camera and stop its streaming task."""
        if stream_key not in self.camera_tasks:
            return False

        try:
            task = self.camera_tasks[stream_key]
            if not task.done():
                task.cancel()
                try:
                    await asyncio.wait_for(task, timeout=5.0)
                except (asyncio.CancelledError, asyncio.TimeoutError):
                    pass

            del self.camera_tasks[stream_key]

            if stream_key in self.pipelines:
                self.pipelines[stream_key].close()
                del self.pipelines[stream_key]

            self.logger.info(f"Removed camera {stream_key}")
            return True
        except Exception as e:
            self.logger.error(f"Error removing camera {stream_key}: {e}")
            return False

    def _send_response(self, cmd_type: str, stream_key: str, success: bool, error: str = None):
        """Send response back to manager."""
        if self.response_queue:
            try:
                self.response_queue.put_nowait({
                    'worker_id': self.worker_id,
                    'command_type': cmd_type,
                    'stream_key': stream_key,
                    'success': success,
                    'error': error,
                    'timestamp': time.time()
                })
            except Exception:
                pass

    async def _shutdown(self):
        """Gracefully shutdown worker."""
        self.logger.info(f"Worker {self.worker_id}: Starting shutdown")

        # Cancel all camera tasks
        for stream_key, task in self.camera_tasks.items():
            if not task.done():
                task.cancel()

        if self.camera_tasks:
            await asyncio.gather(*self.camera_tasks.values(), return_exceptions=True)

        # Close all pipelines
        for stream_key, pipeline in list(self.pipelines.items()):
            pipeline.close()
        self.pipelines.clear()

        # Cleanup SHM buffers
        if self.use_shm:
            for camera_id, shm_buffer in list(self._shm_buffers.items()):
                try:
                    shm_buffer.close()
                except Exception:
                    pass
            self._shm_buffers.clear()

        # Close Redis client
        if self.redis_client:
            await self.redis_client.close()

        # Shutdown executor
        self.executor.shutdown(wait=True, cancel_futures=False)

        self._report_health("stopped")
        self.logger.info(f"Worker {self.worker_id}: Shutdown complete")

    def _report_health(self, status: str, active_cameras: int = 0, error: Optional[str] = None):
        """Report health status to main process."""
        try:
            proc_cpu = 0
            proc_memory_mb = 0
            try:
                proc_cpu = self._process_info.cpu_percent(interval=None)
                proc_memory_mb = self._process_info.memory_info().rss / 1024 / 1024
            except Exception:
                pass

            avg_encoding_ms = 0
            if self._encoding_times:
                avg_encoding_ms = sum(self._encoding_times) / len(self._encoding_times) * 1000

            health_report = {
                'worker_id': self.worker_id,
                'status': status,
                'active_cameras': active_cameras,
                'timestamp': time.time(),
                'error': error,
                'metrics': {
                    'cpu_percent': proc_cpu,
                    'memory_mb': proc_memory_mb,
                    'frames_encoded': self._frames_encoded,
                    'encoding_errors': self._encoding_errors,
                    'avg_encoding_ms': avg_encoding_ms,
                    'pinned_cores': self.pinned_cores,
                    'backend': 'ffmpeg',
                },
            }
            self.health_queue.put_nowait(health_report)
        except Exception:
            pass

    def _log_metrics(self) -> None:
        """Log periodic metrics summary for all cameras."""
        current_time = time.time()
        elapsed = current_time - self._last_fps_check_time
        if elapsed <= 0:
            return

        # Calculate total FPS across all cameras
        total_frames = sum(self._frames_per_camera.values())
        total_fps = total_frames / elapsed if elapsed > 0 else 0
        num_active = len(self._frames_per_camera)
        avg_fps_per_camera = total_fps / num_active if num_active > 0 else 0

        # Get timing stats
        avg_encoding_ms = 0
        if self._encoding_times:
            avg_encoding_ms = sum(self._encoding_times) / len(self._encoding_times) * 1000

        avg_frame_ms = 0
        if self._frame_times:
            avg_frame_ms = sum(self._frame_times) / len(self._frame_times) * 1000

        # Get process stats
        try:
            cpu_percent = self._process_info.cpu_percent(interval=None)
            memory_mb = self._process_info.memory_info().rss / 1024 / 1024
        except Exception:
            cpu_percent = 0
            memory_mb = 0

        # Log summary
        mode_str = "SHM" if self.use_shm else "JPEG"
        self.logger.info(
            f"Worker {self.worker_id} metrics ({mode_str}): "
            f"cameras={num_active}, total_fps={total_fps:.1f}, avg_fps={avg_fps_per_camera:.1f}, "
            f"avg_encode={avg_encoding_ms:.1f}ms, avg_frame={avg_frame_ms:.1f}ms, "
            f"cpu={cpu_percent:.1f}%, mem={memory_mb:.0f}MB"
        )

        # Reset per-camera frame counters
        self._frames_per_camera.clear()
        self._last_fps_check_time = current_time
        self._last_metrics_log = current_time


def run_ffmpeg_worker(
    worker_id: int,
    camera_configs: List[Dict[str, Any]],
    stream_config: Dict[str, Any],
    stop_event: multiprocessing.Event,
    health_queue: multiprocessing.Queue,
    command_queue: multiprocessing.Queue = None,
    response_queue: multiprocessing.Queue = None,
    ffmpeg_config_dict: Optional[Dict[str, Any]] = None,
    use_shm: bool = False,
    shm_slot_count: int = 1000,
    shm_frame_format: str = "BGR",
    pin_cpu_affinity: bool = True,
    total_workers: int = 1,
    frame_optimizer_enabled: bool = True,
    frame_optimizer_config: Optional[Dict[str, Any]] = None,
):
    """Entry point for FFmpeg worker process.

    Args:
        worker_id: Worker identifier
        camera_configs: List of camera configurations
        stream_config: Streaming configuration
        stop_event: Shutdown event
        health_queue: Health reporting queue
        command_queue: Queue for receiving dynamic camera commands
        response_queue: Queue for sending command responses
        ffmpeg_config_dict: FFmpeg configuration as dict
        use_shm: Enable SHM mode
        shm_slot_count: Number of frame slots per camera ring buffer
        shm_frame_format: Frame format for SHM storage
        pin_cpu_affinity: Pin worker to specific CPU cores
        total_workers: Total number of workers
        frame_optimizer_enabled: Enable frame optimizer
        frame_optimizer_config: Frame optimizer configuration dict
    """
    logging.basicConfig(
        level=logging.INFO,
        format=f'%(asctime)s - FFmpegWorker-{worker_id} - %(name)s - %(levelname)s - %(message)s'
    )

    logger = logging.getLogger(f"AsyncFFmpegWorker-{worker_id}")
    logger.info(f"Starting FFmpeg worker {worker_id}")

    # Create FFmpeg config from dict
    ffmpeg_config = None
    if ffmpeg_config_dict:
        ffmpeg_config = FFmpegConfig(**ffmpeg_config_dict)

    try:
        worker = AsyncFFmpegWorker(
            worker_id=worker_id,
            camera_configs=camera_configs,
            stream_config=stream_config,
            stop_event=stop_event,
            health_queue=health_queue,
            command_queue=command_queue,
            response_queue=response_queue,
            ffmpeg_config=ffmpeg_config,
            use_shm=use_shm,
            shm_slot_count=shm_slot_count,
            shm_frame_format=shm_frame_format,
            pin_cpu_affinity=pin_cpu_affinity,
            total_workers=total_workers,
            frame_optimizer_enabled=frame_optimizer_enabled,
            frame_optimizer_config=frame_optimizer_config,
        )

        asyncio.run(worker.run())

    except Exception as e:
        logger.error(f"Worker {worker_id} failed: {e}", exc_info=True)
        raise