matrice_streaming-0.1.14-py3-none-any.whl → matrice_streaming-0.1.65-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- matrice_streaming/__init__.py +44 -32
- matrice_streaming/streaming_gateway/camera_streamer/__init__.py +68 -1
- matrice_streaming/streaming_gateway/camera_streamer/async_camera_worker.py +1388 -0
- matrice_streaming/streaming_gateway/camera_streamer/async_ffmpeg_worker.py +966 -0
- matrice_streaming/streaming_gateway/camera_streamer/camera_streamer.py +188 -24
- matrice_streaming/streaming_gateway/camera_streamer/device_detection.py +507 -0
- matrice_streaming/streaming_gateway/camera_streamer/encoding_pool_manager.py +136 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_camera_streamer.py +1048 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_config.py +192 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_worker_manager.py +470 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_camera_streamer.py +1368 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_worker.py +1063 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_worker_manager.py +546 -0
- matrice_streaming/streaming_gateway/camera_streamer/message_builder.py +60 -15
- matrice_streaming/streaming_gateway/camera_streamer/nvdec.py +1330 -0
- matrice_streaming/streaming_gateway/camera_streamer/nvdec_worker_manager.py +412 -0
- matrice_streaming/streaming_gateway/camera_streamer/platform_pipelines.py +680 -0
- matrice_streaming/streaming_gateway/camera_streamer/stream_statistics.py +111 -4
- matrice_streaming/streaming_gateway/camera_streamer/video_capture_manager.py +223 -27
- matrice_streaming/streaming_gateway/camera_streamer/worker_manager.py +694 -0
- matrice_streaming/streaming_gateway/debug/__init__.py +27 -2
- matrice_streaming/streaming_gateway/debug/benchmark.py +727 -0
- matrice_streaming/streaming_gateway/debug/debug_gstreamer_gateway.py +599 -0
- matrice_streaming/streaming_gateway/debug/debug_streaming_gateway.py +245 -95
- matrice_streaming/streaming_gateway/debug/debug_utils.py +29 -0
- matrice_streaming/streaming_gateway/debug/test_videoplayback.py +318 -0
- matrice_streaming/streaming_gateway/dynamic_camera_manager.py +656 -39
- matrice_streaming/streaming_gateway/metrics_reporter.py +676 -139
- matrice_streaming/streaming_gateway/streaming_action.py +71 -20
- matrice_streaming/streaming_gateway/streaming_gateway.py +1026 -78
- matrice_streaming/streaming_gateway/streaming_gateway_utils.py +175 -20
- matrice_streaming/streaming_gateway/streaming_status_listener.py +89 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/METADATA +1 -1
- matrice_streaming-0.1.65.dist-info/RECORD +56 -0
- matrice_streaming-0.1.14.dist-info/RECORD +0 -38
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/WHEEL +0 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/licenses/LICENSE.txt +0 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/top_level.txt +0 -0
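The bulk of the new surface area is the FFmpeg-based streamer added in ffmpeg_camera_streamer.py, reproduced in the hunk below. As a minimal usage sketch based on the constructor and method signatures visible in that file (the `session` and `gateway_util` objects, the service id, and the RTSP URL are placeholders, not values from this diff):

```python
from matrice_streaming.streaming_gateway.camera_streamer.ffmpeg_camera_streamer import (
    FFmpegCameraStreamer,
)
from matrice_streaming.streaming_gateway.camera_streamer.ffmpeg_config import FFmpegConfig

session = None       # a real SDK Session object is expected here (placeholder)
gateway_util = None  # optional gateway utility; None skips connection-info lookup

streamer = FFmpegCameraStreamer(
    session=session,
    service_id="svc-123",          # hypothetical service identifier
    server_type="redis",           # or "kafka"
    gateway_util=gateway_util,
    ffmpeg_config=FFmpegConfig(),  # hwaccel="auto" is resolved via detect_hwaccel()
    use_shm=False,                 # JPEG mode; True writes raw frames to a SHM ring buffer
)

# Route frames for this camera to a topic, then start the background worker thread.
streamer.register_stream_topic("cam-front-door", "cam-front-door_topic")
streamer.setup_stream_for_topic("cam-front-door_topic")
streamer.start_background_stream(
    input="rtsp://example.local/stream1",  # hypothetical RTSP source
    fps=15,
    stream_key="cam-front-door",
    quality=85,
)

print(streamer.get_transmission_stats())
streamer.stop_streaming()  # stop all cameras, join worker threads, clean up SHM buffers
```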
matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_camera_streamer.py (new file)
@@ -0,0 +1,1048 @@
"""FFmpeg-based camera streamer for high-performance video ingestion.

This module implements a drop-in replacement for CameraStreamer using
FFmpeg subprocess pipes instead of OpenCV. Key advantages:
- No OpenCV wrapper overhead
- No Python ↔ C per-frame calls
- Fewer memory copies
- Better FFmpeg scheduling
- Decoder threads isolated from Python GIL
"""
import asyncio
import logging
import subprocess
import signal
import time
import threading
import os
import sys
from typing import Dict, Any, Optional, List, Union, Tuple
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass, field
from collections import deque

import numpy as np
import cv2
import psutil

from matrice_common.optimize import FrameOptimizer
from matrice_common.stream.shm_ring_buffer import ShmRingBuffer

from .ffmpeg_config import FFmpegConfig, is_ffmpeg_available, detect_hwaccel


class FFmpegPipeline:
    """FFmpeg subprocess-based frame capture pipeline.

    This class manages an FFmpeg subprocess that decodes video and outputs
    raw frames via stdout pipe. It provides both sync and async interfaces.
    """

    def __init__(
        self,
        source: str,
        width: int,
        height: int,
        config: Optional[FFmpegConfig] = None,
        stream_key: str = "default",
    ):
        """Initialize FFmpeg pipeline.

        Args:
            source: Video source (file path, RTSP URL, HTTP URL, device)
            width: Frame width (0 = auto-detect from source)
            height: Frame height (0 = auto-detect from source)
            config: FFmpeg configuration options
            stream_key: Stream identifier for logging
        """
        self.source = source
        self.config = config or FFmpegConfig()
        self.stream_key = stream_key
        self.logger = logging.getLogger(f"FFmpegPipeline-{stream_key}")

        # Get source dimensions if not specified
        if width == 0 or height == 0:
            detected_width, detected_height = self._detect_dimensions(source)
            width = width or detected_width
            height = height or detected_height

        # Apply downscale if configured
        if self.config.output_width > 0:
            width = self.config.output_width
        if self.config.output_height > 0:
            height = self.config.output_height

        self.width = width
        self.height = height

        # Calculate frame size based on pixel format
        self.bytes_per_pixel = self._get_bytes_per_pixel(self.config.pixel_format)
        self.frame_size = width * height * self.bytes_per_pixel

        # Process state
        self.proc: Optional[subprocess.Popen] = None
        self.is_running = False
        self._restart_count = 0
        self._last_frame_time = 0.0

        # Metrics
        self.frames_read = 0
        self.frames_dropped = 0
        self.errors = 0
        self.bytes_read = 0
        self.latencies: deque = deque(maxlen=1000)

        # Start the pipeline
        self._start()

    def _get_bytes_per_pixel(self, pixel_format: str) -> int:
        """Get bytes per pixel for the given format."""
        formats = {
            "bgr24": 3,
            "rgb24": 3,
            "nv12": 1.5,  # Y plane + UV interleaved
            "gray": 1,
        }
        return int(formats.get(pixel_format, 3))

    def _detect_dimensions(self, source: str) -> Tuple[int, int]:
        """Detect video dimensions using ffprobe.

        Args:
            source: Video source

        Returns:
            Tuple of (width, height), defaults to (640, 480) on failure
        """
        try:
            cmd = [
                "ffprobe",
                "-v", "error",
                "-select_streams", "v:0",
                "-show_entries", "stream=width,height",
                "-of", "csv=p=0",
                source
            ]
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=10)
            if result.returncode == 0:
                parts = result.stdout.strip().split(",")
                if len(parts) >= 2:
                    return int(parts[0]), int(parts[1])
        except Exception as e:
            self.logger.warning(f"Failed to detect dimensions: {e}")

        return 640, 480  # Default fallback

    def _start(self):
        """Start the FFmpeg subprocess."""
        if self.proc is not None:
            self._stop()

        # Build command
        cmd = self.config.to_ffmpeg_args(
            self.source, self.width, self.height
        )

        self.logger.info(f"Starting FFmpeg pipeline: {' '.join(cmd[:10])}...")

        try:
            self.proc = subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                bufsize=self.frame_size * self.config.buffer_frames,
            )
            self.is_running = True
            self._restart_count += 1
            self.logger.info(
                f"FFmpeg pipeline started (PID: {self.proc.pid}, "
                f"frame_size: {self.frame_size}, restart: {self._restart_count})"
            )
        except Exception as e:
            self.logger.error(f"Failed to start FFmpeg: {e}")
            self.is_running = False
            raise

    def _stop(self):
        """Stop the FFmpeg subprocess gracefully."""
        if self.proc is None:
            return

        try:
            # Close stdout first to signal EOF
            if self.proc.stdout:
                self.proc.stdout.close()

            # Try graceful termination
            self.proc.terminate()
            try:
                self.proc.wait(timeout=2)
            except subprocess.TimeoutExpired:
                self.proc.kill()
                self.proc.wait(timeout=1)

            self.logger.info(f"FFmpeg pipeline stopped (PID: {self.proc.pid})")
        except Exception as e:
            self.logger.warning(f"Error stopping FFmpeg: {e}")
            try:
                self.proc.kill()
            except Exception:
                pass
        finally:
            self.proc = None
            self.is_running = False

    def read_frame(self) -> Optional[np.ndarray]:
        """Read one raw frame from FFmpeg pipe (blocking).

        Returns:
            numpy array of shape (height, width, 3) or None if failed
        """
        if self.proc is None or self.proc.poll() is not None:
            self.is_running = False
            return None

        start_time = time.time()

        try:
            data = self.proc.stdout.read(self.frame_size)

            if len(data) != self.frame_size:
                self.frames_dropped += 1
                self.logger.debug(f"Incomplete frame: got {len(data)}, expected {self.frame_size}")
                return None

            # Convert to numpy array
            frame = np.frombuffer(data, dtype=np.uint8)
            frame = frame.reshape((self.height, self.width, self.bytes_per_pixel))

            # Record metrics
            self.frames_read += 1
            self.bytes_read += len(data)
            latency = time.time() - start_time
            self.latencies.append(latency)
            self._last_frame_time = time.time()

            return frame

        except Exception as e:
            self.errors += 1
            self.logger.error(f"Error reading frame: {e}")
            return None

    async def read_frame_async(
        self,
        executor: Optional[ThreadPoolExecutor] = None
    ) -> Optional[np.ndarray]:
        """Read frame asynchronously using executor.

        Args:
            executor: Thread pool executor (uses default if None)

        Returns:
            numpy array or None if failed
        """
        loop = asyncio.get_event_loop()
        return await loop.run_in_executor(executor, self.read_frame)

    def restart(self) -> bool:
        """Restart the FFmpeg pipeline.

        Returns:
            True if restart succeeded
        """
        try:
            self._stop()
            time.sleep(self.config.reconnect_delay)
            self._start()
            return True
        except Exception as e:
            self.logger.error(f"Failed to restart pipeline: {e}")
            return False

    def get_metrics(self) -> Dict[str, Any]:
        """Get pipeline metrics.

        Returns:
            Dictionary of metrics
        """
        avg_latency = sum(self.latencies) / len(self.latencies) if self.latencies else 0
        return {
            "frames_read": self.frames_read,
            "frames_dropped": self.frames_dropped,
            "errors": self.errors,
            "bytes_read": self.bytes_read,
            "restart_count": self._restart_count,
            "is_running": self.is_running,
            "avg_latency_ms": avg_latency * 1000,
            "width": self.width,
            "height": self.height,
            "frame_size": self.frame_size,
        }

    def close(self):
        """Close the pipeline and release resources."""
        self._stop()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()


class FFmpegCameraStreamer:
    """FFmpeg-based camera streamer with same API as CameraStreamer.

    This class provides a drop-in replacement for CameraStreamer using
    FFmpeg subprocess pipelines for video ingestion. It supports:
    - Background streaming to Redis/Kafka
    - Dynamic camera add/remove/update
    - Frame optimization (similarity detection)
    - SHM mode for raw frame sharing
    """

    def __init__(
        self,
        session,
        service_id: str,
        server_type: str = "redis",
        video_codec: Optional[str] = None,
        gateway_util=None,
        ffmpeg_config: Optional[FFmpegConfig] = None,
        # SHM mode options
        use_shm: bool = False,
        shm_slot_count: int = 1000,
        shm_frame_format: str = "BGR",
        # Frame optimizer options
        frame_optimizer_enabled: bool = True,
        frame_optimizer_config: Optional[Dict[str, Any]] = None,
        # CPU affinity options
        pin_cpu_affinity: bool = False,
        cpu_affinity_core: Optional[int] = None,
    ):
        """Initialize FFmpeg camera streamer.

        Args:
            session: Session object for authentication
            service_id: Service identifier
            server_type: Backend type (redis or kafka)
            video_codec: Video codec (h264 or h265)
            gateway_util: Gateway utility for API interactions
            ffmpeg_config: FFmpeg configuration
            use_shm: Enable SHM mode for raw frame sharing
            shm_slot_count: Number of frame slots per camera ring buffer
            shm_frame_format: Frame format for SHM storage ("BGR", "RGB", or "NV12")
            frame_optimizer_enabled: Enable frame optimizer for skipping similar frames
            frame_optimizer_config: Frame optimizer configuration dict
            pin_cpu_affinity: Pin process to specific CPU core
            cpu_affinity_core: CPU core to pin to (None = auto-assign)
        """
        self.session = session
        self.service_id = service_id
        self.server_type = server_type
        self.video_codec = video_codec
        self.gateway_util = gateway_util
        self.ffmpeg_config = ffmpeg_config or FFmpegConfig()

        self.logger = logging.getLogger("FFmpegCameraStreamer")

        # Validate FFmpeg availability
        if not is_ffmpeg_available():
            raise RuntimeError("FFmpeg is not available on this system")

        # Auto-detect hardware acceleration if set to auto
        if self.ffmpeg_config.hwaccel == "auto":
            self.ffmpeg_config.hwaccel = detect_hwaccel()
            self.logger.info(f"Auto-detected hwaccel: {self.ffmpeg_config.hwaccel}")

        # Stream management
        self.pipelines: Dict[str, FFmpegPipeline] = {}
        self.streaming_threads: List[threading.Thread] = []
        self._stop_streaming = False
        self._stream_lock = threading.RLock()

        # Topic registration
        self.stream_topics: Dict[str, str] = {}  # stream_key -> topic
        self._setup_topics: set = set()

        # Statistics
        self._transmission_stats = {
            "total_frames": 0,
            "total_bytes": 0,
            "start_time": None,
        }

        # MatriceStream client
        self.stream_client = None

        # ================================================================
        # SHM Mode Configuration
        # ================================================================
        self.use_shm = use_shm
        self.shm_slot_count = shm_slot_count
        self.shm_frame_format = shm_frame_format
        self._shm_buffers: Dict[str, ShmRingBuffer] = {}
        self._last_shm_frame_idx: Dict[str, int] = {}

        # Register signal handlers for SHM cleanup
        if use_shm:
            self._setup_signal_handlers()
            self.logger.info(
                f"SHM mode ENABLED: format={shm_frame_format}, slots={shm_slot_count}"
            )

        # ================================================================
        # Frame Optimizer Configuration
        # ================================================================
        frame_optimizer_config = frame_optimizer_config or {}
        self.frame_optimizer = FrameOptimizer(
            enabled=frame_optimizer_enabled,
            scale=frame_optimizer_config.get("scale", 0.4),
            diff_threshold=frame_optimizer_config.get("diff_threshold", 15),
            similarity_threshold=frame_optimizer_config.get("similarity_threshold", 0.05),
            bg_update_interval=frame_optimizer_config.get("bg_update_interval", 10),
        )
        self._last_sent_frame_ids: Dict[str, str] = {}

        # ================================================================
        # CPU Affinity Configuration
        # ================================================================
        self.pin_cpu_affinity = pin_cpu_affinity
        self.cpu_affinity_core = cpu_affinity_core
        self.pinned_cores: Optional[List[int]] = None

        if pin_cpu_affinity:
            self._apply_cpu_affinity()

        self.logger.info(
            f"FFmpegCameraStreamer initialized: "
            f"hwaccel={self.ffmpeg_config.hwaccel}, "
            f"pixel_format={self.ffmpeg_config.pixel_format}, "
            f"shm={use_shm}, optimizer={frame_optimizer_enabled}"
        )

    def _setup_signal_handlers(self):
        """Setup signal handlers for graceful SHM cleanup on termination."""
        def cleanup_handler(signum, frame):
            """Handle SIGTERM/SIGINT for graceful SHM cleanup."""
            sig_name = signal.Signals(signum).name if hasattr(signal.Signals, 'name') else str(signum)
            self.logger.info(f"Received {sig_name}, cleaning up SHM buffers...")
            self._cleanup_shm_buffers()
            # Re-raise the signal to allow normal termination
            signal.signal(signum, signal.SIG_DFL)
            os.kill(os.getpid(), signum)

        # Register signal handlers
        signal.signal(signal.SIGINT, cleanup_handler)
        if sys.platform != 'win32':
            signal.signal(signal.SIGTERM, cleanup_handler)

        # Also register atexit handler
        import atexit
        atexit.register(self._cleanup_shm_buffers)

    def _cleanup_shm_buffers(self):
        """Cleanup all SHM buffers."""
        for camera_id, shm_buffer in list(self._shm_buffers.items()):
            try:
                shm_buffer.close()
                self.logger.info(f"Closed SHM buffer for {camera_id}")
            except Exception as e:
                self.logger.warning(f"Failed to cleanup SHM {camera_id}: {e}")
        self._shm_buffers.clear()

    def _apply_cpu_affinity(self):
        """Apply CPU affinity pinning."""
        try:
            p = psutil.Process()
            if self.cpu_affinity_core is not None:
                # Pin to specific core
                self.pinned_cores = [self.cpu_affinity_core]
            else:
                # Auto-assign to first available core
                cpu_count = psutil.cpu_count(logical=True)
                self.pinned_cores = [0] if cpu_count > 0 else None

            if self.pinned_cores:
                p.cpu_affinity(self.pinned_cores)
                self.logger.info(f"CPU affinity pinned to cores: {self.pinned_cores}")
        except Exception as e:
            self.logger.warning(f"Failed to set CPU affinity: {e}")
            self.pinned_cores = None

    def _get_or_create_shm_buffer(
        self, stream_key: str, width: int, height: int
    ) -> ShmRingBuffer:
        """Get existing or create new SHM buffer for stream.

        Args:
            stream_key: Stream identifier
            width: Frame width
            height: Frame height

        Returns:
            ShmRingBuffer instance for this stream
        """
        if stream_key not in self._shm_buffers:
            format_map = {
                "BGR": ShmRingBuffer.FORMAT_BGR,
                "RGB": ShmRingBuffer.FORMAT_RGB,
                "NV12": ShmRingBuffer.FORMAT_NV12,
            }
            frame_format = format_map.get(self.shm_frame_format, ShmRingBuffer.FORMAT_BGR)

            self._shm_buffers[stream_key] = ShmRingBuffer(
                camera_id=stream_key,
                width=width,
                height=height,
                frame_format=frame_format,
                slot_count=self.shm_slot_count,
                create=True,
            )
            self.logger.info(
                f"Created SHM buffer for {stream_key}: "
                f"{width}x{height} {self.shm_frame_format}, {self.shm_slot_count} slots"
            )
        return self._shm_buffers[stream_key]

    def register_stream_topic(self, stream_key: str, topic: str):
        """Register a topic for a stream.

        Args:
            stream_key: Stream identifier
            topic: Topic name for this stream
        """
        with self._stream_lock:
            self.stream_topics[stream_key] = topic
            self.logger.debug(f"Registered topic {topic} for stream {stream_key}")

    def setup_stream_for_topic(self, topic: str):
        """Setup stream for a topic (create topic if needed).

        Args:
            topic: Topic name to setup
        """
        with self._stream_lock:
            if topic in self._setup_topics:
                return

            # Initialize stream client if needed
            if self.stream_client is None:
                self._init_stream_client()

            self._setup_topics.add(topic)
            self.logger.debug(f"Setup topic: {topic}")

    def _init_stream_client(self):
        """Initialize the MatriceStream client."""
        try:
            from matrice_common.stream import MatriceStream, StreamType

            # Get connection info from gateway util
            if self.gateway_util:
                conn_info = self.gateway_util.get_and_wait_for_connection_info()
            else:
                conn_info = {}

            # Create MatriceStream
            stream_type = StreamType.REDIS if self.server_type == "redis" else StreamType.KAFKA
            self.stream_client = MatriceStream(
                stream_type=stream_type,
                **conn_info
            )

            self.logger.info("MatriceStream client initialized")
        except Exception as e:
            self.logger.error(f"Failed to initialize stream client: {e}")
            raise

    def start_background_stream(
        self,
        input: Union[str, int],
        fps: int = 30,
        stream_key: str = "default",
        stream_group_key: str = "default",
        quality: int = 90,
        width: Optional[int] = None,
        height: Optional[int] = None,
        simulate_video_file_stream: bool = False,
        camera_location: str = "Unknown",
    ) -> bool:
        """Start background streaming for a camera.

        Args:
            input: Video source (file path, URL, or device ID)
            fps: Target FPS
            stream_key: Unique stream identifier
            stream_group_key: Stream group identifier
            quality: JPEG quality (1-100)
            width: Target width (None = use source)
            height: Target height (None = use source)
            simulate_video_file_stream: Loop video files
            camera_location: Camera location description

        Returns:
            True if stream started successfully
        """
        with self._stream_lock:
            if stream_key in self.pipelines:
                self.logger.warning(f"Stream {stream_key} already exists")
                return False

            try:
                # Create FFmpeg config for this stream
                stream_config = FFmpegConfig(
                    hwaccel=self.ffmpeg_config.hwaccel,
                    pixel_format=self.ffmpeg_config.pixel_format,
                    low_latency=self.ffmpeg_config.low_latency,
                    loop=simulate_video_file_stream,
                    realtime=not simulate_video_file_stream,
                    output_width=width or 0,
                    output_height=height or 0,
                    quality=quality,
                )

                # Convert device ID to string if needed
                source = str(input) if isinstance(input, int) else input

                # Create pipeline
                pipeline = FFmpegPipeline(
                    source=source,
                    width=width or 0,
                    height=height or 0,
                    config=stream_config,
                    stream_key=stream_key,
                )
                self.pipelines[stream_key] = pipeline

                # Start streaming thread
                thread = threading.Thread(
                    target=self._stream_loop,
                    args=(stream_key, stream_group_key, fps, quality, camera_location),
                    name=f"FFmpegStream-{stream_key}",
                    daemon=True,
                )
                self.streaming_threads.append(thread)
                thread.start()

                if self._transmission_stats["start_time"] is None:
                    self._transmission_stats["start_time"] = time.time()

                self.logger.info(
                    f"Started FFmpeg stream: {stream_key} from {source} "
                    f"({pipeline.width}x{pipeline.height} @ {fps} FPS)"
                )
                return True

            except Exception as e:
                self.logger.error(f"Failed to start stream {stream_key}: {e}")
                return False

    def _stream_loop(
        self,
        stream_key: str,
        stream_group_key: str,
        fps: int,
        quality: int,
        camera_location: str,
    ):
        """Main streaming loop for a camera (runs in thread).

        Args:
            stream_key: Stream identifier
            stream_group_key: Stream group identifier
            fps: Target FPS
            quality: JPEG quality
            camera_location: Camera location
        """
        pipeline = self.pipelines.get(stream_key)
        if not pipeline:
            return

        topic = self.stream_topics.get(stream_key, f"{stream_key}_topic")
        frame_interval = 1.0 / fps
        frame_counter = 0

        self.logger.info(f"Stream loop started for {stream_key}")

        while not self._stop_streaming:
            loop_start = time.time()

            try:
                # Read frame from FFmpeg pipeline
                frame = pipeline.read_frame()

                if frame is None:
                    if not self._stop_streaming:
                        # Try to restart pipeline
                        self.logger.warning(f"No frame from {stream_key}, restarting...")
                        if not pipeline.restart():
                            time.sleep(1.0)
                    continue

                frame_counter += 1

                # ================================================================
                # SHM Mode vs JPEG Mode
                # ================================================================
                if self.use_shm:
                    self._process_frame_shm_mode(
                        frame=frame,
                        stream_key=stream_key,
                        stream_group_key=stream_group_key,
                        topic=topic,
                        width=pipeline.width,
                        height=pipeline.height,
                        frame_counter=frame_counter,
                        camera_location=camera_location,
                    )
                else:
                    self._process_frame_jpeg_mode(
                        frame=frame,
                        stream_key=stream_key,
                        stream_group_key=stream_group_key,
                        topic=topic,
                        width=pipeline.width,
                        height=pipeline.height,
                        quality=quality,
                        frame_counter=frame_counter,
                        camera_location=camera_location,
                    )

                # Maintain target FPS
                elapsed = time.time() - loop_start
                sleep_time = max(0, frame_interval - elapsed)
                if sleep_time > 0:
                    time.sleep(sleep_time)

            except Exception as e:
                self.logger.error(f"Error in stream loop {stream_key}: {e}")
                time.sleep(0.1)

        self.logger.info(f"Stream loop stopped for {stream_key}")

    def _process_frame_jpeg_mode(
        self,
        frame: np.ndarray,
        stream_key: str,
        stream_group_key: str,
        topic: str,
        width: int,
        height: int,
        quality: int,
        frame_counter: int,
        camera_location: str,
    ):
        """Process frame in JPEG mode with frame optimizer.

        Args:
            frame: Raw frame from FFmpeg
            stream_key: Stream identifier
            stream_group_key: Stream group identifier
            topic: Topic name
            width: Frame width
            height: Frame height
            quality: JPEG quality
            frame_counter: Current frame number
            camera_location: Camera location
        """
        # Check frame similarity BEFORE encoding
        is_similar, similarity_score = self.frame_optimizer.is_similar(frame, stream_key)
        reference_frame_id = self._last_sent_frame_ids.get(stream_key)

        import uuid

        if is_similar and reference_frame_id:
            # Frame is similar - send cached reference
            message = {
                "frame_id": str(uuid.uuid4()),
                "input_name": stream_key,
                "input_stream": {
                    "content": b"",  # Empty for cached frame
                    "metadata": {
                        "width": width,
                        "height": height,
                        "frame_count": frame_counter,
                        "camera_location": camera_location,
                        "stream_group_key": stream_group_key,
                        "encoding_type": "cached",
                        "codec": "cached",
                        "timestamp": time.time(),
                        "similarity_score": similarity_score,
                        "cached_frame_id": reference_frame_id,
                    },
                },
            }

            if self.stream_client:
                try:
                    self.stream_client.add_message(topic, message, key=stream_key)
                    self._transmission_stats["total_frames"] += 1
                except Exception as e:
                    self.logger.error(f"Failed to send cached frame: {e}")
            return

        # Frame is different - encode and send full frame
        success, jpeg_buffer = cv2.imencode(
            '.jpg', frame, [int(cv2.IMWRITE_JPEG_QUALITY), quality]
        )

        if not success:
            self.logger.warning(f"JPEG encode failed for {stream_key}")
            return

        frame_data = bytes(jpeg_buffer)

        # Build and send message
        new_frame_id = str(uuid.uuid4())
        message = {
            "frame_id": new_frame_id,
            "input_name": stream_key,
            "input_stream": {
                "content": frame_data,
                "metadata": {
                    "width": width,
                    "height": height,
                    "frame_count": frame_counter,
                    "camera_location": camera_location,
                    "stream_group_key": stream_group_key,
                    "encoding_type": "jpeg",
                    "codec": "h264",
                    "timestamp": time.time(),
                },
            },
        }

        if self.stream_client:
            try:
                self.stream_client.add_message(topic, message, key=stream_key)
                self._transmission_stats["total_frames"] += 1
                self._transmission_stats["total_bytes"] += len(frame_data)

                # Track this frame_id for future references
                self._last_sent_frame_ids[stream_key] = new_frame_id
                self.frame_optimizer.set_last_frame_id(stream_key, new_frame_id)
            except Exception as e:
                self.logger.error(f"Failed to send frame: {e}")

    def _process_frame_shm_mode(
        self,
        frame: np.ndarray,
        stream_key: str,
        stream_group_key: str,
        topic: str,
        width: int,
        height: int,
        frame_counter: int,
        camera_location: str,
    ):
        """Process frame in SHM mode - write raw frame to SHM, metadata to Redis.

        Args:
            frame: Raw frame from FFmpeg (BGR format)
            stream_key: Stream identifier
            stream_group_key: Stream group identifier
            topic: Topic name
            width: Frame width
            height: Frame height
            frame_counter: Current frame number
            camera_location: Camera location
        """
        # Check frame similarity BEFORE writing to SHM
        is_similar, similarity_score = self.frame_optimizer.is_similar(frame, stream_key)
        reference_frame_idx = self._last_shm_frame_idx.get(stream_key)

        if is_similar and reference_frame_idx is not None:
            # Frame is similar - send metadata with reference to previous frame
            ts_ns = int(time.time() * 1e9)
            shm_buffer = self._shm_buffers.get(stream_key)

            if self.stream_client:
                try:
                    self.stream_client.add_shm_metadata(
                        stream_name=topic,
                        cam_id=stream_key,
                        shm_name=shm_buffer.shm_name if shm_buffer else "",
                        frame_idx=reference_frame_idx,
                        slot=None,
                        ts_ns=ts_ns,
                        width=width,
                        height=height,
                        format=self.shm_frame_format,
                        is_similar=True,
                        reference_frame_idx=reference_frame_idx,
                        similarity_score=similarity_score,
                        stream_group_key=stream_group_key,
                        camera_location=camera_location,
                        frame_counter=frame_counter,
                    )
                    self._transmission_stats["total_frames"] += 1
                except Exception as e:
                    self.logger.error(f"Failed to send SHM metadata: {e}")
            return

        # Frame is different - write to SHM
        shm_buffer = self._get_or_create_shm_buffer(stream_key, width, height)

        # Convert frame to target format
        if self.shm_frame_format == "RGB":
            raw_bytes = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB).tobytes()
        elif self.shm_frame_format == "NV12":
            from matrice_common.stream.shm_ring_buffer import bgr_to_nv12
            raw_bytes = bgr_to_nv12(frame)
        else:  # BGR default
            raw_bytes = frame.tobytes()

        # Write to SHM ring buffer
        frame_idx, slot = shm_buffer.write_frame(raw_bytes)
        self._last_shm_frame_idx[stream_key] = frame_idx

        # Send metadata to stream backend
        ts_ns = int(time.time() * 1e9)
        if self.stream_client:
            try:
                self.stream_client.add_shm_metadata(
                    stream_name=topic,
                    cam_id=stream_key,
                    shm_name=shm_buffer.shm_name,
                    frame_idx=frame_idx,
                    slot=slot,
                    ts_ns=ts_ns,
                    width=width,
                    height=height,
                    format=self.shm_frame_format,
                    is_similar=False,
                    stream_group_key=stream_group_key,
                    camera_location=camera_location,
                    frame_counter=frame_counter,
                )
                self._transmission_stats["total_frames"] += 1
                self._transmission_stats["total_bytes"] += len(raw_bytes)
            except Exception as e:
                self.logger.error(f"Failed to send SHM metadata: {e}")

    def _build_message(
        self,
        frame_data: bytes,
        stream_key: str,
        stream_group_key: str,
        width: int,
        height: int,
        frame_counter: int,
        camera_location: str,
    ) -> Dict[str, Any]:
        """Build a message for the stream backend.

        Args:
            frame_data: JPEG encoded frame
            stream_key: Stream identifier
            stream_group_key: Stream group identifier
            width: Frame width
            height: Frame height
            frame_counter: Current frame number
            camera_location: Camera location

        Returns:
            Message dictionary
        """
        import uuid

        return {
            "frame_id": str(uuid.uuid4()),
            "input_name": stream_key,
            "input_stream": {
                "content": frame_data,
                "metadata": {
                    "width": width,
                    "height": height,
                    "frame_count": frame_counter,
                    "camera_location": camera_location,
                    "stream_group_key": stream_group_key,
                    "encoding_type": "jpeg",
                    "codec": "h264",  # JPEG encoded
                    "timestamp": time.time(),
                },
            },
        }

    def stop_streaming(self, stream_key: Optional[str] = None):
        """Stop streaming for one or all cameras.

        Args:
            stream_key: Stream to stop (None = stop all)
        """
        with self._stream_lock:
            if stream_key:
                # Stop specific stream
                if stream_key in self.pipelines:
                    self.pipelines[stream_key].close()
                    del self.pipelines[stream_key]
                    self.logger.info(f"Stopped stream: {stream_key}")

                # Cleanup SHM buffer for this stream
                if stream_key in self._shm_buffers:
                    try:
                        self._shm_buffers[stream_key].close()
                        del self._shm_buffers[stream_key]
                        self.logger.info(f"Closed SHM buffer for {stream_key}")
                    except Exception as e:
                        self.logger.warning(f"Failed to close SHM buffer {stream_key}: {e}")
            else:
                # Stop all streams
                self._stop_streaming = True

                for key, pipeline in list(self.pipelines.items()):
                    pipeline.close()
                    self.logger.info(f"Stopped stream: {key}")

                self.pipelines.clear()

                # Wait for threads
                for thread in self.streaming_threads:
                    if thread.is_alive():
                        thread.join(timeout=5.0)

                self.streaming_threads.clear()

                # Cleanup all SHM buffers
                if self.use_shm:
                    self._cleanup_shm_buffers()

                self.logger.info("All FFmpeg streams stopped")

    def get_transmission_stats(self) -> Dict[str, Any]:
        """Get transmission statistics.

        Returns:
            Dictionary of statistics
        """
        with self._stream_lock:
            stats = self._transmission_stats.copy()
            stats["active_streams"] = len(self.pipelines)
            stats["pipeline_stats"] = {
                key: pipeline.get_metrics()
                for key, pipeline in self.pipelines.items()
            }

            if stats["start_time"]:
                elapsed = time.time() - stats["start_time"]
                stats["avg_fps"] = stats["total_frames"] / elapsed if elapsed > 0 else 0
                stats["throughput_mbps"] = (stats["total_bytes"] * 8 / 1_000_000) / elapsed if elapsed > 0 else 0

            return stats

    def reset_transmission_stats(self):
        """Reset transmission statistics."""
        with self._stream_lock:
            self._transmission_stats = {
                "total_frames": 0,
                "total_bytes": 0,
                "start_time": time.time() if self.pipelines else None,
            }

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.stop_streaming()