matrice-streaming 0.1.14__py3-none-any.whl → 0.1.65__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- matrice_streaming/__init__.py +44 -32
- matrice_streaming/streaming_gateway/camera_streamer/__init__.py +68 -1
- matrice_streaming/streaming_gateway/camera_streamer/async_camera_worker.py +1388 -0
- matrice_streaming/streaming_gateway/camera_streamer/async_ffmpeg_worker.py +966 -0
- matrice_streaming/streaming_gateway/camera_streamer/camera_streamer.py +188 -24
- matrice_streaming/streaming_gateway/camera_streamer/device_detection.py +507 -0
- matrice_streaming/streaming_gateway/camera_streamer/encoding_pool_manager.py +136 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_camera_streamer.py +1048 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_config.py +192 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_worker_manager.py +470 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_camera_streamer.py +1368 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_worker.py +1063 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_worker_manager.py +546 -0
- matrice_streaming/streaming_gateway/camera_streamer/message_builder.py +60 -15
- matrice_streaming/streaming_gateway/camera_streamer/nvdec.py +1330 -0
- matrice_streaming/streaming_gateway/camera_streamer/nvdec_worker_manager.py +412 -0
- matrice_streaming/streaming_gateway/camera_streamer/platform_pipelines.py +680 -0
- matrice_streaming/streaming_gateway/camera_streamer/stream_statistics.py +111 -4
- matrice_streaming/streaming_gateway/camera_streamer/video_capture_manager.py +223 -27
- matrice_streaming/streaming_gateway/camera_streamer/worker_manager.py +694 -0
- matrice_streaming/streaming_gateway/debug/__init__.py +27 -2
- matrice_streaming/streaming_gateway/debug/benchmark.py +727 -0
- matrice_streaming/streaming_gateway/debug/debug_gstreamer_gateway.py +599 -0
- matrice_streaming/streaming_gateway/debug/debug_streaming_gateway.py +245 -95
- matrice_streaming/streaming_gateway/debug/debug_utils.py +29 -0
- matrice_streaming/streaming_gateway/debug/test_videoplayback.py +318 -0
- matrice_streaming/streaming_gateway/dynamic_camera_manager.py +656 -39
- matrice_streaming/streaming_gateway/metrics_reporter.py +676 -139
- matrice_streaming/streaming_gateway/streaming_action.py +71 -20
- matrice_streaming/streaming_gateway/streaming_gateway.py +1026 -78
- matrice_streaming/streaming_gateway/streaming_gateway_utils.py +175 -20
- matrice_streaming/streaming_gateway/streaming_status_listener.py +89 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/METADATA +1 -1
- matrice_streaming-0.1.65.dist-info/RECORD +56 -0
- matrice_streaming-0.1.14.dist-info/RECORD +0 -38
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/WHEEL +0 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/licenses/LICENSE.txt +0 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,1368 @@
|
|
|
1
|
+
"""GStreamer-based CameraStreamer using hardware/software encoding.
|
|
2
|
+
|
|
3
|
+
This module provides a GStreamer-based alternative to the OpenCV-based CameraStreamer.
|
|
4
|
+
It supports:
|
|
5
|
+
- NVIDIA NVENC hardware encoding (if available)
|
|
6
|
+
- x264 software encoding
|
|
7
|
+
- OpenH264 encoding
|
|
8
|
+
- JPEG encoding
|
|
9
|
+
- Zero-copy CUDA memory pipelines (for NVENC)
|
|
10
|
+
|
|
11
|
+
The flow and API are identical to CameraStreamer for drop-in replacement.
|
|
12
|
+
"""
|
|
13
|
+
import logging
|
|
14
|
+
import threading
|
|
15
|
+
import time
|
|
16
|
+
from dataclasses import dataclass, field
|
|
17
|
+
from typing import Dict, Optional, Union, Any, List
|
|
18
|
+
from collections import deque
|
|
19
|
+
|
|
20
|
+
from matrice_common.stream.matrice_stream import MatriceStream, StreamType
|
|
21
|
+
from matrice_common.optimize import FrameOptimizer
|
|
22
|
+
|
|
23
|
+
from .stream_statistics import StreamStatistics
|
|
24
|
+
from .message_builder import StreamMessageBuilder
|
|
25
|
+
from .retry_manager import RetryManager
|
|
26
|
+
from ..streaming_gateway_utils import StreamingGatewayUtil
|
|
27
|
+
|
|
28
|
+
# GStreamer imports (optional - graceful degradation if not available)
|
|
29
|
+
GST_AVAILABLE = False
|
|
30
|
+
try:
|
|
31
|
+
import gi
|
|
32
|
+
gi.require_version('Gst', '1.0')
|
|
33
|
+
gi.require_version('GstApp', '1.0')
|
|
34
|
+
from gi.repository import Gst, GstApp, GLib
|
|
35
|
+
GST_AVAILABLE = True
|
|
36
|
+
except ImportError as e:
|
|
37
|
+
logging.warning(f"GStreamer not available: {e}. GStreamerCameraStreamer will not work.")
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
@dataclass
class GStreamerConfig:
    """Configuration for GStreamer encoding.

    Default: JPEG for frame-by-frame streaming with maximum performance.
    JPEG provides:
    - No inter-frame dependencies (true frame-by-frame)
    - Consistent per-frame latency
    - Simple frame caching (hash-based works perfectly)
    - Maximum throughput for static scenes

    The video-codec fields (codec/bitrate/preset/gop_size) are only consulted
    when ``encoder`` selects a video encoder (nvenc/x264/openh264), not jpeg.
    """
    # Basic encoder settings
    encoder: str = "jpeg"  # Default to JPEG for frame-by-frame (was "auto")
    codec: str = "h264"  # h264, h265 (only used for video encoders)
    bitrate: int = 4000000  # 4 Mbps (only used for video encoders)
    preset: str = "low-latency"  # nvenc preset (only used for nvenc)
    gpu_id: int = 0  # CUDA device index passed to nvenc
    use_cuda_memory: bool = True  # Prefer zero-copy CUDA memory pipelines (NVENC)
    jpeg_quality: int = 85  # JPEG quality (1-100, higher=better quality)
    gop_size: int = 30  # I-frame interval (only used for video encoders)

    # Platform-specific settings (NEW)
    platform: str = "auto"  # auto, jetson, desktop-gpu, cpu, intel, amd
    enable_platform_override: bool = True  # Allow manual platform override
    use_hardware_decode: bool = True  # Use nvv4l2decoder, nvdec, vaapi for decode
    use_hardware_jpeg: bool = True  # Use nvjpegenc (Jetson), vaapijpegenc (Intel/AMD)
    jetson_use_nvmm: bool = True  # NVMM zero-copy memory on Jetson
    frame_optimizer_mode: str = "hash-only"  # hash-only, dual-appsink, disabled
    source_optimization: bool = True  # Enable source-specific optimizations
    fallback_on_error: bool = True  # Graceful fallback to CPU if HW fails
    verbose_pipeline_logging: bool = False  # Debug pipeline construction
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
@dataclass
class GStreamerMetrics:
    """Metrics for a GStreamer stream.

    Counters are mutated by GStreamerPipeline under its metrics lock;
    latency values are recorded in seconds (not milliseconds).
    """
    frames_processed: int = 0  # Frames successfully pulled from the appsink
    frames_dropped: int = 0  # Frames dropped (maintained by callers)
    total_bytes: int = 0  # Cumulative encoded payload size
    latencies: List[float] = field(default_factory=list)  # Per-frame encode latencies, seconds
    start_time: float = 0.0  # Wall-clock time when the pipeline started
    errors: int = 0  # Pull/bus errors observed
    warmup_frames: int = 30  # Frames skipped before latency is measured
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
class GStreamerPipeline:
    """Manages a single GStreamer pipeline for video capture and encoding.

    This class handles:
    - Pipeline construction with automatic encoder detection
    - Video source handling (cameras, files, RTSP, HTTP)
    - Frame pulling from appsink
    - PTS-based latency measurement
    """

    def __init__(
        self,
        stream_key: str,
        source: Union[str, int],
        width: int,
        height: int,
        fps: int,
        config: GStreamerConfig,
    ):
        """Initialize GStreamer pipeline.

        Args:
            stream_key: Unique identifier for this stream
            source: Video source (camera index, file path, RTSP URL, etc.)
            width: Target output width
            height: Target output height
            fps: Target frames per second
            config: GStreamer configuration
        """
        self.stream_key = stream_key
        self.source = source
        self.width = width
        self.height = height
        self.fps = fps
        self.config = config

        self.logger = logging.getLogger(f"GStreamerPipeline-{stream_key}")

        # Populated by start(); None while stopped.
        self.pipeline = None
        self.appsink = None
        self.running = False

        # PTS-based latency tracking: first PTS seen is paired with the wall
        # clock so later frames can compare stream time vs wall time.
        self.start_pts: Optional[int] = None
        self.wall_start_time: float = 0.0

        # Metrics (guarded by _metrics_lock; pull_frame runs on a worker thread
        # while get_metrics may be called from elsewhere).
        self.metrics = GStreamerMetrics()
        self._metrics_lock = threading.Lock()

        # Detected encoder type (set by _detect_encoder / _build_pipeline_string)
        self._encoder_type: Optional[str] = None

        # Platform detection and pipeline builder (NEW)
        # Imported locally — presumably to avoid import cycles at module load; confirm.
        from .device_detection import PlatformDetector
        from .platform_pipelines import PipelineFactory

        self.platform_detector = PlatformDetector.get_instance()
        self.platform_info = self.platform_detector.detect()
        self.pipeline_builder = PipelineFactory.get_builder(config, self.platform_info)

    def _detect_encoder(self) -> str:
        """Detect the best available encoder.

        Returns the configured encoder verbatim unless it is "auto", in which
        case candidate encoders are probed in preference order by attempting
        to parse a trivial test pipeline.
        """
        if self.config.encoder != "auto":
            return self.config.encoder

        # Try NVENC first (hardware)
        encoders_to_try = [
            ("nvenc", "nvh264enc ! fakesink"),
            ("x264", "x264enc ! fakesink"),
            ("openh264", "openh264enc ! fakesink"),
            ("jpeg", "jpegenc ! fakesink"),
        ]

        for encoder_name, test_pipeline_str in encoders_to_try:
            try:
                # parse_launch raises if the element is not installed.
                test_pipeline = Gst.parse_launch(test_pipeline_str)
                if test_pipeline:
                    test_pipeline.set_state(Gst.State.NULL)
                    self.logger.info(f"Detected available encoder: {encoder_name}")
                    return encoder_name
            except Exception:
                continue

        self.logger.warning("No hardware encoder found, falling back to x264")
        return "x264"

    def _detect_source_type(self) -> str:
        """Detect the type of video source.

        Returns one of "v4l2", "rtsp", "http", or "file". Anything that does
        not match a known URL scheme or file extension is assumed to be a
        V4L2 device path.
        """
        source_str = str(self.source)

        if isinstance(self.source, int):
            return "v4l2"  # Linux camera
        elif source_str.startswith("rtsp://"):
            return "rtsp"
        elif source_str.startswith(("http://", "https://")):
            return "http"
        elif source_str.endswith((".mp4", ".avi", ".mkv", ".mov", ".webm")):
            return "file"
        else:
            # Assume it's a device path like /dev/video0
            return "v4l2"

    def _build_source_element(self) -> str:
        """Build GStreamer source element based on source type.

        Returns:
            A partial gst-launch-style pipeline fragment for the source side.
        """
        source_type = self._detect_source_type()

        if source_type == "v4l2":
            # Linux camera — integer indices become /dev/videoN paths.
            device = f"/dev/video{self.source}" if isinstance(self.source, int) else self.source
            return f"v4l2src device={device}"

        elif source_type == "rtsp":
            # RTSP stream with low-latency settings
            # NOTE(review): assumes an H.264 RTP payload; other codecs would need
            # a different depayloader — confirm against the deployed cameras.
            return (
                f"rtspsrc location={self.source} latency=100 buffer-mode=auto "
                f"! rtph264depay ! h264parse"
            )

        elif source_type == "http":
            # HTTP/HTTPS stream - use qtdemux for direct demuxing
            return f"souphttpsrc location={self.source} ! qtdemux ! avdec_h264"

        elif source_type == "file":
            # Video file - use qtdemux for mp4/mov, matroskademux for mkv
            # This avoids dynamic pad linking issues with decodebin
            ext = self.source.lower().split('.')[-1] if '.' in self.source else ''
            if ext in ('mp4', 'mov', 'm4v'):
                return f"filesrc location={self.source} ! qtdemux ! avdec_h264"
            elif ext in ('mkv', 'webm'):
                return f"filesrc location={self.source} ! matroskademux ! avdec_h264"
            elif ext in ('avi',):
                return f"filesrc location={self.source} ! avidemux ! avdec_h264"
            else:
                # Fallback to multifilesrc for unknown formats
                return f"filesrc location={self.source} ! qtdemux ! avdec_h264"

        else:
            # Default to videotestsrc for testing
            return "videotestsrc pattern=smpte is-live=true"

    def _build_encoder_element(self) -> tuple:
        """Build encoder element and output caps.

        Returns:
            Tuple of (encoder_string, output_caps)

        Raises:
            RuntimeError: If the detected encoder type is not recognized.
        """
        encoder_type = self._detect_encoder()
        self._encoder_type = encoder_type
        bitrate_kbps = self.config.bitrate // 1000  # nvenc/x264 take kbps

        if encoder_type == "nvenc":
            # NVIDIA hardware encoder
            if self.config.codec == "h265":
                encoder = "nvh265enc"
            else:
                encoder = "nvh264enc"

            encoder_settings = (
                f"{encoder} "
                f"cuda-device-id={self.config.gpu_id} "
                f"preset={self.config.preset} "
                f"bitrate={bitrate_kbps} "
                f"gop-size={self.config.gop_size} "
                f"zerolatency=true "
                f"rc-lookahead=0 "
                f"bframes=0 "
                f"rc-mode=cbr-ld-hq "
            )
            caps_out = f"video/x-{self.config.codec},profile=main"

        elif encoder_type == "x264":
            # x264 software encoder - optimized for throughput
            # NOTE(review): max(1, min(4, 8)) is a constant 4 — looks like a
            # placeholder for a CPU-count-based heuristic; confirm intent.
            threads = max(1, min(4, 8))  # Balance threads
            encoder_settings = (
                f"x264enc "
                f"speed-preset=ultrafast "
                f"tune=zerolatency "
                f"bitrate={bitrate_kbps} "
                f"key-int-max={self.config.gop_size} "
                f"bframes=0 "
                f"threads={threads} "
                f"sliced-threads=true "
                f"aud=false "
                f"cabac=false "
            )
            caps_out = "video/x-h264,profile=baseline"

        elif encoder_type == "openh264":
            # OpenH264 encoder (takes bitrate in bps, unlike nvenc/x264)
            encoder_settings = (
                f"openh264enc "
                f"bitrate={self.config.bitrate} "
                f"complexity=low "
                f"rate-control=bitrate "
            )
            caps_out = "video/x-h264,profile=baseline"

        elif encoder_type == "jpeg":
            # JPEG encoder - per-frame compression
            encoder_settings = (
                f"jpegenc "
                f"quality={self.config.jpeg_quality} "
                f"idct-method=ifast "
            )
            caps_out = "image/jpeg"

        else:
            raise RuntimeError(f"Unknown encoder type: {encoder_type}")

        return encoder_settings, caps_out

    def _build_pipeline_string(self) -> str:
        """Build platform-optimized GStreamer pipeline string.

        Delegates the actual construction to the platform-specific
        pipeline builder selected in __init__.
        """
        # Detect source type
        source_type = self._detect_source_type()

        # Detect encoder (for compatibility)
        encoder = self._detect_encoder()
        self._encoder_type = encoder

        # Build config dict for pipeline builder
        builder_config = {
            'source_type': source_type,
            'source': self.source,
            'width': self.width,
            'height': self.height,
            'fps': self.fps,
            'encoder': encoder,
            'quality': self.config.jpeg_quality,
            'bitrate': self.config.bitrate,
        }

        # Use pipeline builder to construct platform-optimized pipeline
        if self.config.frame_optimizer_mode == "dual-appsink":
            pipeline_str = self.pipeline_builder.build_dual_appsink_pipeline(builder_config)
        else:
            pipeline_str = self.pipeline_builder.build_complete_pipeline(builder_config)

        if self.config.verbose_pipeline_logging:
            self.logger.info(f"Platform: {self.platform_info.platform_type.value}")
            self.logger.info(f"Encoder: {encoder}, Decoder: {self.platform_info.recommended_decoder}")
            self.logger.info(f"Pipeline: {pipeline_str[:200]}...")

        return pipeline_str

    def start(self) -> bool:
        """Start the GStreamer pipeline.

        On failure, optionally falls back once to a CPU-only pipeline
        (config.fallback_on_error) before stopping and re-raising.

        Returns:
            bool: True if started successfully

        Raises:
            RuntimeError: If GStreamer is unavailable or the pipeline cannot
                be created/started (after any CPU fallback attempt).
        """
        if not GST_AVAILABLE:
            raise RuntimeError("GStreamer not available")

        if self.running:
            self.logger.warning("Pipeline already running")
            return False

        try:
            # Build and parse pipeline
            pipeline_str = self._build_pipeline_string()
            self.logger.info(f"Creating pipeline: {pipeline_str[:200]}...")

            self.pipeline = Gst.parse_launch(pipeline_str)

            # Get appsink (or appsinks for dual-appsink mode)
            self.appsink = self.pipeline.get_by_name("sink")
            if not self.appsink:
                # Try dual-appsink mode naming
                self.appsink = self.pipeline.get_by_name("output-sink")
            if not self.appsink:
                raise RuntimeError("Failed to get appsink from pipeline")

            # Setup bus for error handling
            bus = self.pipeline.get_bus()
            bus.add_signal_watch()
            bus.connect("message::error", self._on_error)
            bus.connect("message::warning", self._on_warning)
            bus.connect("message::eos", self._on_eos)

            # Start pipeline
            ret = self.pipeline.set_state(Gst.State.PLAYING)
            if ret == Gst.StateChangeReturn.FAILURE:
                raise RuntimeError("Failed to start pipeline")

            # Wait for pipeline to be ready
            # NOTE(review): CLOCK_TIME_NONE waits indefinitely; a stalled state
            # change (e.g. dead RTSP source) blocks here forever — consider a
            # bounded timeout.
            self.pipeline.get_state(Gst.CLOCK_TIME_NONE)

            self.running = True
            self.metrics.start_time = time.time()

            self.logger.info(
                f"Pipeline started - Platform: {self.platform_info.platform_type.value}, "
                f"Encoder: {self._encoder_type}"
            )
            return True

        except Exception as e:
            self.logger.error(f"Failed to start pipeline: {e}")

            # Fallback to CPU-only pipeline if enabled
            if self.config.fallback_on_error and self.config.platform != "cpu":
                self.logger.warning("Attempting fallback to CPU-only pipeline...")

                try:
                    # Force CPU-only pipeline. Setting platform="cpu" first
                    # bounds the recursion below to a single retry.
                    self.config.platform = "cpu"
                    from .platform_pipelines import PipelineFactory, CpuOnlyPipelineBuilder
                    self.pipeline_builder = CpuOnlyPipelineBuilder(self.config, self.platform_info)

                    # Retry with CPU pipeline
                    return self.start()

                except Exception as fallback_error:
                    self.logger.error(f"Fallback to CPU also failed: {fallback_error}")

            self.stop()
            raise

    def stop(self):
        """Stop the GStreamer pipeline and release its resources."""
        self.running = False

        if self.pipeline:
            bus = self.pipeline.get_bus()
            if bus:
                bus.remove_signal_watch()
            self.pipeline.set_state(Gst.State.NULL)
            self.pipeline = None

        self.appsink = None
        self.logger.info("Pipeline stopped")

    def pull_frame(self, timeout_ns: Optional[int] = None) -> Optional[tuple]:
        """Pull an encoded frame from the pipeline.

        Args:
            timeout_ns: Timeout in nanoseconds (default: 100ms)

        Returns:
            Tuple of (frame_data, latency_ms, size) or None if no frame
        """
        if not self.running or not self.appsink:
            return None

        timeout_ns = timeout_ns or (Gst.SECOND // 10)  # 100ms default for better compatibility

        try:
            sample = self.appsink.try_pull_sample(timeout_ns)
            if not sample:
                return None

            now = time.time()
            buffer = sample.get_buffer()

            # Get buffer data
            size = buffer.get_size()
            success, map_info = buffer.map(Gst.MapFlags.READ)
            if not success:
                return None

            # Copy frame data (buffer will be reused)
            frame_data = bytes(map_info.data)
            buffer.unmap(map_info)

            # Calculate latency using PTS
            pts = buffer.pts

            if self.start_pts is None:
                self.start_pts = pts
                self.wall_start_time = now

            latency_ms = 0.0
            with self._metrics_lock:
                self.metrics.frames_processed += 1
                self.metrics.total_bytes += size

                # Latency = wall-clock elapsed minus stream-time elapsed;
                # skipped during warmup and for buffers without a valid PTS.
                if (self.metrics.frames_processed > self.metrics.warmup_frames
                        and pts != Gst.CLOCK_TIME_NONE
                        and self.start_pts is not None):

                    stream_time = (pts - self.start_pts) / Gst.SECOND
                    wall_time = now - self.wall_start_time
                    encode_latency = wall_time - stream_time

                    # NOTE(review): latencies grows without bound for long-lived
                    # streams (get_metrics only reads the tail) — consider trimming.
                    if encode_latency >= 0:
                        latency_ms = encode_latency * 1000
                        self.metrics.latencies.append(encode_latency)
                    else:
                        latency_ms = 1.0  # Floor for early frames
                        self.metrics.latencies.append(0.001)

            return frame_data, latency_ms, size

        except Exception as e:
            with self._metrics_lock:
                self.metrics.errors += 1
            self.logger.debug(f"Frame pull error: {e}")
            return None

    def get_metrics(self) -> GStreamerMetrics:
        """Get a snapshot copy of pipeline metrics (last 100 latencies only)."""
        with self._metrics_lock:
            return GStreamerMetrics(
                frames_processed=self.metrics.frames_processed,
                frames_dropped=self.metrics.frames_dropped,
                total_bytes=self.metrics.total_bytes,
                latencies=list(self.metrics.latencies[-100:]),  # Last 100
                start_time=self.metrics.start_time,
                errors=self.metrics.errors,
            )

    def _on_error(self, bus, message):
        """Handle pipeline errors (GStreamer bus callback)."""
        err, debug = message.parse_error()
        self.logger.error(f"Pipeline error: {err}: {debug}")
        with self._metrics_lock:
            self.metrics.errors += 1

    def _on_warning(self, bus, message):
        """Handle pipeline warnings (GStreamer bus callback)."""
        warn, debug = message.parse_warning()
        self.logger.warning(f"Pipeline warning: {warn}: {debug}")

    def _on_eos(self, bus, message):
        """Handle end-of-stream (logged only; stopping is left to the caller)."""
        self.logger.info("End of stream received")
|
|
513
|
+
|
|
514
|
+
|
|
515
|
+
class GStreamerCameraStreamer:
|
|
516
|
+
"""GStreamer-based camera streamer with the same API as CameraStreamer.
|
|
517
|
+
|
|
518
|
+
This class provides:
|
|
519
|
+
- Same public API as CameraStreamer for drop-in replacement
|
|
520
|
+
- GStreamer-based video capture and encoding
|
|
521
|
+
- Support for hardware encoding (NVENC) when available
|
|
522
|
+
- Multiple codec support (H.264, H.265, JPEG)
|
|
523
|
+
- Robust retry logic and statistics tracking
|
|
524
|
+
"""
|
|
525
|
+
|
|
526
|
+
    def __init__(
        self,
        session,
        service_id: str,
        server_type: str,
        strip_input_content: bool = False,
        video_codec: Optional[str] = None,
        gateway_util: StreamingGatewayUtil = None,
        gstreamer_config: Optional[GStreamerConfig] = None,
        frame_optimizer_enabled: bool = True,
        frame_optimizer_config: Optional[Dict[str, Any]] = None,
    ):
        """Initialize GStreamerCameraStreamer.

        Args:
            session: Session object for authentication
            service_id: Deployment/gateway ID
            server_type: 'kafka' or 'redis'
            strip_input_content: Strip content for out-of-band retrieval
            video_codec: Video codec override
            gateway_util: Utility for API interactions
            gstreamer_config: GStreamer-specific configuration
            frame_optimizer_enabled: Enable frame optimization to skip similar frames
            frame_optimizer_config: Configuration for FrameOptimizer (scale, diff_threshold, etc.)

        Raises:
            RuntimeError: If GStreamer/PyGObject is not importable.
        """
        if not GST_AVAILABLE:
            raise RuntimeError(
                "GStreamer is not available. Please install GStreamer and PyGObject: "
                "pip install PyGObject && apt-get install gstreamer1.0-plugins-*"
            )

        self.session = session
        self.service_id = service_id
        self.server_type = server_type.lower()
        self.gateway_util = gateway_util
        self.gstreamer_config = gstreamer_config or GStreamerConfig()

        # Initialize GStreamer (idempotent per GStreamer semantics — confirm)
        Gst.init(None)

        # Initialize modular components
        self.statistics = StreamStatistics()
        self.message_builder = StreamMessageBuilder(service_id, strip_input_content)

        # Initialize frame optimizer for skipping similar frames
        optimizer_config = frame_optimizer_config or {}
        self.frame_optimizer = FrameOptimizer(
            enabled=frame_optimizer_enabled,
            scale=optimizer_config.get("scale", 0.4),
            diff_threshold=optimizer_config.get("diff_threshold", 15),
            similarity_threshold=optimizer_config.get("similarity_threshold", 0.05),
            bg_update_interval=optimizer_config.get("bg_update_interval", 10),
        )
        self._last_sent_frame_ids: Dict[str, str] = {}  # stream_key -> last sent frame_id

        # Map video_codec to GStreamer config.
        # NOTE(review): runs before self.logger is assigned (end of __init__),
        # so _configure_codec must not touch self.logger.
        if video_codec:
            self._configure_codec(video_codec)

        # Pipeline management
        self.pipelines: Dict[str, GStreamerPipeline] = {}
        self.streaming_threads: List[threading.Thread] = []
        self._stop_streaming = False

        # Topic management
        self.stream_topics: Dict[str, str] = {}  # stream_key -> topic
        self.setup_topics = set()  # topics already set up on MatriceStream

        # Metrics logging
        self._last_metrics_log_time = time.time()
        self._metrics_log_interval = 30.0

        # Connection management (for refresh_connection_info support)
        self._connection_lock = threading.RLock()
        self._send_failure_count = 0
        self._last_connection_refresh_time = 0.0
        self.connection_refresh_threshold = 10  # Number of failures before refresh
        self.connection_refresh_interval = 60.0  # Minimum seconds between refreshes

        # Initialize MatriceStream (blocks waiting for connection info when a
        # gateway_util is supplied)
        if self.gateway_util:
            self.stream_config = self.gateway_util.get_and_wait_for_connection_info(
                server_type=self.server_type
            )
        else:
            self.stream_config = {}

        # Add Redis configuration
        if self.server_type == "redis":
            self.stream_config.update({
                'pool_max_connections': 500,
                'enable_batching': True,
                'batch_size': 10,
                'batch_timeout': 0.01
            })

        # Anything other than "redis" is treated as Kafka.
        self.matrice_stream = MatriceStream(
            StreamType.REDIS if self.server_type == "redis" else StreamType.KAFKA,
            **self.stream_config
        )

        self.logger = logging.getLogger(__name__)
        self.logger.info(
            f"GStreamerCameraStreamer initialized - encoder: {self.gstreamer_config.encoder}, "
            f"codec: {self.gstreamer_config.codec}"
        )
|
|
632
|
+
|
|
633
|
+
def _configure_codec(self, video_codec: str):
|
|
634
|
+
"""Configure GStreamer based on video codec string."""
|
|
635
|
+
vc = video_codec.lower().strip()
|
|
636
|
+
|
|
637
|
+
if vc in ("h264", "h264-frame"):
|
|
638
|
+
self.gstreamer_config.codec = "h264"
|
|
639
|
+
elif vc in ("h265", "h265-frame", "hevc"):
|
|
640
|
+
self.gstreamer_config.codec = "h265"
|
|
641
|
+
elif vc == "jpeg":
|
|
642
|
+
self.gstreamer_config.encoder = "jpeg"
|
|
643
|
+
|
|
644
|
+
# ========================================================================
|
|
645
|
+
# Public API - Topic Management (same as CameraStreamer)
|
|
646
|
+
# ========================================================================
|
|
647
|
+
|
|
648
|
+
def register_stream_topic(self, stream_key: str, topic: str):
|
|
649
|
+
"""Register a topic for a specific stream key."""
|
|
650
|
+
self.stream_topics[stream_key] = topic
|
|
651
|
+
self.logger.info(f"Registered topic '{topic}' for stream '{stream_key}'")
|
|
652
|
+
|
|
653
|
+
def get_topic_for_stream(self, stream_key: str) -> Optional[str]:
|
|
654
|
+
"""Get the topic for a specific stream key."""
|
|
655
|
+
return self.stream_topics.get(stream_key)
|
|
656
|
+
|
|
657
|
+
def setup_stream_for_topic(self, topic: str) -> bool:
|
|
658
|
+
"""Setup MatriceStream for a topic."""
|
|
659
|
+
try:
|
|
660
|
+
if topic not in self.setup_topics:
|
|
661
|
+
self.matrice_stream.setup(topic)
|
|
662
|
+
self.setup_topics.add(topic)
|
|
663
|
+
self.logger.info(f"MatriceStream setup complete for topic: {topic}")
|
|
664
|
+
return True
|
|
665
|
+
except Exception as e:
|
|
666
|
+
self.logger.error(f"Failed to setup MatriceStream for topic {topic}: {e}")
|
|
667
|
+
return False
|
|
668
|
+
|
|
669
|
+
# ========================================================================
|
|
670
|
+
# Public API - Streaming Control
|
|
671
|
+
# ========================================================================
|
|
672
|
+
|
|
673
|
+
    def start_stream(
        self,
        input: Union[str, int],
        fps: int = 10,
        stream_key: Optional[str] = None,
        stream_group_key: Optional[str] = None,
        quality: int = 95,
        width: Optional[int] = None,
        height: Optional[int] = None,
        simulate_video_file_stream: bool = False,
        is_video_chunk: bool = False,
        chunk_duration_seconds: Optional[float] = None,
        chunk_frames: Optional[int] = None,
        camera_location: Optional[str] = None,
    ) -> bool:
        """Start streaming in current thread (blocking).

        A topic must have been registered for *stream_key* beforehand via
        register_stream_topic(); otherwise this returns False immediately.

        NOTE(review): is_video_chunk, chunk_duration_seconds and chunk_frames
        are accepted for API compatibility but are not forwarded to
        _stream_loop — confirm whether chunking is supported in this path.

        Args:
            input: Video source (camera index, file path, RTSP URL, etc.)
            fps: Target frames per second
            stream_key: Unique stream identifier (defaults to "default")
            stream_group_key: Stream group identifier (defaults to "default")
            quality: Encoding quality hint
            width: Target width (defaults to 640)
            height: Target height (defaults to 480)
            simulate_video_file_stream: Pace file playback like a live source
            camera_location: Human-readable location (defaults to "Unknown")

        Returns:
            bool: True once the blocking stream loop exits cleanly,
            False on missing topic or error.
        """
        try:
            topic = self.get_topic_for_stream(stream_key)
            if not topic:
                self.logger.error(f"No topic registered for stream {stream_key}")
                return False

            # Blocks until the stream loop finishes.
            self._stream_loop(
                input, stream_key or "default", stream_group_key or "default",
                topic, fps, quality, width or 640, height or 480,
                simulate_video_file_stream, camera_location or "Unknown"
            )
            return True

        except Exception as e:
            self.logger.error(f"Failed to start stream: {e}", exc_info=True)
            return False
|
|
705
|
+
|
|
706
|
+
def start_background_stream(
    self,
    input: Union[str, int],
    fps: int = 10,
    stream_key: Optional[str] = None,
    stream_group_key: Optional[str] = None,
    quality: int = 95,
    width: Optional[int] = None,
    height: Optional[int] = None,
    simulate_video_file_stream: bool = False,
    is_video_chunk: bool = False,
    chunk_duration_seconds: Optional[float] = None,
    chunk_frames: Optional[int] = None,
    camera_location: Optional[str] = None,
) -> bool:
    """Start streaming in background thread (non-blocking).

    Spawns a daemon thread running the streaming loop and registers it in
    ``self.streaming_threads`` so ``stop_streaming()`` can join it later.

    NOTE(review): ``is_video_chunk``, ``chunk_duration_seconds`` and
    ``chunk_frames`` are accepted for interface compatibility but are not
    forwarded to the streaming loop — confirm intent.

    Returns:
        bool: True if the worker thread was started, False otherwise.
    """
    try:
        topic = self.get_topic_for_stream(stream_key)
        if not topic:
            self.logger.error(f"No topic registered for stream {stream_key}")
            return False

        # Daemon thread: must not keep the process alive on shutdown.
        worker = threading.Thread(
            target=self._stream_loop,
            args=(
                input,
                stream_key or "default",
                stream_group_key or "default",
                topic,
                fps,
                quality,
                width or 640,
                height or 480,
                simulate_video_file_stream,
                camera_location or "Unknown",
            ),
            daemon=True,
        )

        # Register before starting so stop_streaming() always sees it.
        self.streaming_threads.append(worker)
        worker.start()
        self.logger.info(f"Started GStreamer background stream for {stream_key}")
        return True

    except Exception as e:
        self.logger.error(f"Failed to start background stream: {e}")
        return False
|
|
746
|
+
|
|
747
|
+
def stop_streaming(self):
    """Signal shutdown, tear down every pipeline, and join worker threads."""
    # Raise the stop flag so all streaming loops exit at their next check.
    self._stop_streaming = True

    # Tear down each registered pipeline. Iterate over a snapshot so the
    # dict can be cleared safely afterwards.
    for stream_key, pipeline in list(self.pipelines.items()):
        try:
            pipeline.stop()
        except Exception as e:
            self.logger.error(f"Error stopping pipeline {stream_key}: {e}")
    self.pipelines.clear()

    # Give each live worker thread a bounded window to finish cleanly.
    for worker in self.streaming_threads:
        if worker.is_alive():
            worker.join(timeout=5.0)
    self.streaming_threads.clear()

    # Re-arm the flag so streaming can be started again later.
    self._stop_streaming = False
    self.logger.info("All GStreamer streams stopped")
|
|
767
|
+
|
|
768
|
+
# ========================================================================
|
|
769
|
+
# Public API - Statistics
|
|
770
|
+
# ========================================================================
|
|
771
|
+
|
|
772
|
+
def get_transmission_stats(self) -> Dict[str, Any]:
    """Return transmission statistics with GStreamer pipeline aggregates.

    Combines the shared statistics-tracker report with totals folded
    across every active pipeline's metrics snapshot.
    """
    # Take one metrics snapshot per pipeline, then fold them together.
    snapshots = [p.get_metrics() for p in self.pipelines.values()]

    total_frames = sum(m.frames_processed for m in snapshots)
    total_bytes = sum(m.total_bytes for m in snapshots)
    total_errors = sum(m.errors for m in snapshots)
    all_latencies = []
    for m in snapshots:
        all_latencies.extend(m.latencies)

    stats = self.statistics.get_transmission_stats(
        f"gstreamer-{self.gstreamer_config.codec}",
        len(self.streaming_threads)
    )

    # Latencies are tracked in seconds; report the mean in milliseconds.
    avg_latency_ms = (
        sum(all_latencies) / len(all_latencies) * 1000 if all_latencies else 0
    )

    # Attach encoder-level aggregates under a dedicated key.
    stats["gstreamer"] = {
        "encoder": self.gstreamer_config.encoder,
        "codec": self.gstreamer_config.codec,
        "total_frames": total_frames,
        "total_bytes": total_bytes,
        "total_errors": total_errors,
        "avg_latency_ms": avg_latency_ms,
    }

    return stats
|
|
806
|
+
|
|
807
|
+
def reset_transmission_stats(self):
    """Reset transmission statistics.

    Delegates to the shared statistics tracker. Per-pipeline GStreamer
    metrics (frames_processed, latencies, ...) are not touched here —
    they live on the pipeline objects themselves.
    """
    self.statistics.reset()
|
|
810
|
+
|
|
811
|
+
# ========================================================================
|
|
812
|
+
# Public API - Message Production
|
|
813
|
+
# ========================================================================
|
|
814
|
+
|
|
815
|
+
def produce_request(
    self,
    input_data: bytes,
    stream_key: Optional[str] = None,
    stream_group_key: Optional[str] = None,
    metadata: Optional[Dict] = None,
    topic: Optional[str] = None,
    timeout: float = 60.0,
) -> bool:
    """Produce a stream request to MatriceStream (synchronous).

    Args:
        input_data: Frame data bytes
        stream_key: Stream identifier
        stream_group_key: Stream group identifier
        metadata: Optional metadata dictionary
        topic: Optional topic override
        timeout: Timeout in seconds
            (NOTE(review): currently unused in this method — confirm intent)

    Returns:
        bool: True if successful, False otherwise
    """
    try:
        key = stream_key or "default"
        # Topic precedence: explicit override, then per-stream
        # registration, then a catch-all default.
        actual_topic = topic or self.get_topic_for_stream(stream_key) or "default_topic"
        meta = metadata or {}

        # Per-stream timing snapshot and monotonically increasing order index.
        last_read, last_write, last_process = self.statistics.get_timing(key)
        input_order = self.statistics.get_next_input_order(key)

        # Codec: explicit metadata override wins, otherwise derived from config.
        if meta.get("video_codec"):
            codec = meta["video_codec"]
        elif self.gstreamer_config.encoder == "jpeg":
            codec = "jpeg"
        else:
            codec = self.gstreamer_config.codec

        message = self.message_builder.build_message(
            input_data,
            key,
            stream_group_key or "default",
            codec,
            meta,
            actual_topic,
            self.matrice_stream.config.get('bootstrap_servers', 'localhost'),
            input_order,
            last_read,
            last_write,
            last_process,
        )

        self.matrice_stream.add_message(
            topic_or_channel=actual_topic,
            message=message,
            key=str(stream_key),
        )

        # A successful publish clears the consecutive-failure counter.
        self._send_failure_count = 0
        return True

    except Exception as e:
        self.logger.error(f"Failed to produce request: {e}")
        self._send_failure_count += 1
        return False
|
|
866
|
+
|
|
867
|
+
async def async_produce_request(
    self,
    input_data: bytes,
    stream_key: Optional[str] = None,
    stream_group_key: Optional[str] = None,
    metadata: Optional[Dict] = None,
    topic: Optional[str] = None,
    timeout: float = 60.0,
) -> bool:
    """Produce a stream request to MatriceStream (asynchronous).

    Async twin of produce_request(): builds the same message, but lazily
    initialises the async side of the stream client before publishing.

    Args:
        input_data: Frame data bytes
        stream_key: Stream identifier
        stream_group_key: Stream group identifier
        metadata: Optional metadata dictionary
        topic: Optional topic override
        timeout: Timeout in seconds
            (NOTE(review): currently unused in this method — confirm intent)

    Returns:
        bool: True if successful, False otherwise
    """
    try:
        # Topic precedence: explicit override, then per-stream
        # registration, then a catch-all default.
        actual_topic = topic or self.get_topic_for_stream(stream_key) or "default_topic"
        metadata = metadata or {}

        # Per-stream timing snapshot and monotonically increasing order index.
        last_read, last_write, last_process = self.statistics.get_timing(stream_key or "default")
        input_order = self.statistics.get_next_input_order(stream_key or "default")

        # Determine codec from metadata or config
        codec = metadata.get("video_codec") or ("jpeg" if self.gstreamer_config.encoder == "jpeg" else self.gstreamer_config.codec)

        message = self.message_builder.build_message(
            input_data, stream_key or "default", stream_group_key or "default",
            codec, metadata, actual_topic,
            self.matrice_stream.config.get('bootstrap_servers', 'localhost'),
            input_order, last_read, last_write, last_process
        )

        # Lazily set up the async client on first use.
        if not self.matrice_stream.is_async_setup():
            await self.matrice_stream.async_setup(actual_topic)

        await self.matrice_stream.async_add_message(
            topic_or_channel=actual_topic,
            message=message,
            key=str(stream_key)
        )

        # A successful publish clears the consecutive-failure counter.
        self._send_failure_count = 0
        return True
    except Exception as e:
        self.logger.error(f"Failed to async produce request: {e}")
        self._send_failure_count += 1
        return False
|
|
921
|
+
|
|
922
|
+
# ========================================================================
|
|
923
|
+
# Public API - Connection Management
|
|
924
|
+
# ========================================================================
|
|
925
|
+
|
|
926
|
+
def refresh_connection_info(self) -> bool:
    """Refresh connection info from API and reinitialize MatriceStream.

    This method checks the server connection info from the API and if it has changed,
    it reinitializes the MatriceStream with the new connection details.

    Returns:
        bool: True if connection was refreshed successfully (or was
        already up to date); False on throttle, fetch failure, or error.
    """
    if not self.gateway_util:
        self.logger.warning("Cannot refresh connection: no gateway_util provided")
        return False

    # Serialise refresh attempts so concurrent callers cannot tear down
    # and rebuild the stream at the same time.
    with self._connection_lock:
        current_time = time.time()

        # Check if enough time has passed since last refresh.
        # NOTE(review): _last_connection_refresh_time is only advanced on
        # success (or when info is unchanged), so repeated failures can be
        # retried on every call — confirm this is the intended behavior.
        if current_time - self._last_connection_refresh_time < self.connection_refresh_interval:
            self.logger.debug(
                f"Skipping connection refresh, last refresh was {current_time - self._last_connection_refresh_time:.1f}s ago"
            )
            return False

        try:
            self.logger.info("Attempting to refresh connection info from API...")

            # Fetch new connection info (blocks up to connection_timeout).
            new_connection_info = self.gateway_util.get_and_wait_for_connection_info(
                server_type=self.server_type,
                connection_timeout=300
            )

            if not new_connection_info:
                self.logger.error("Failed to fetch new connection info")
                return False

            # Check if connection info has changed
            if new_connection_info == self.stream_config:
                self.logger.info("Connection info unchanged, no refresh needed")
                self._last_connection_refresh_time = current_time
                return True

            self.logger.warning("Connection info has changed! Reinitializing MatriceStream...")

            # Close existing stream; best-effort — a close failure must not
            # prevent reinitialization with the new config.
            try:
                self.matrice_stream.close()
                self.logger.debug("Closed old MatriceStream connection")
            except Exception as e:
                self.logger.warning(f"Error closing old stream: {e}")

            # Update config and reinitialize
            self.stream_config = new_connection_info

            # Add Redis batching config if needed
            if self.server_type == "redis":
                self.stream_config.update({
                    'pool_max_connections': 500,
                    'enable_batching': True,
                    'batch_size': 10,
                    'batch_timeout': 0.01
                })

            self.matrice_stream = MatriceStream(
                StreamType.REDIS if self.server_type == "redis" else StreamType.KAFKA,
                **self.stream_config
            )
            self.logger.info("MatriceStream reinitialized with new connection config")

            # Re-setup all topics. Snapshot first because setup_topics is
            # repopulated as each topic is successfully re-registered.
            topics_to_setup = list(self.setup_topics)
            self.setup_topics.clear()

            for topic in topics_to_setup:
                try:
                    self.matrice_stream.setup(topic)
                    self.setup_topics.add(topic)
                    self.logger.info(f"Re-setup topic: {topic}")
                except Exception as e:
                    self.logger.error(f"Failed to re-setup topic {topic}: {e}")

            # Reset failure count and update refresh time
            self._send_failure_count = 0
            self._last_connection_refresh_time = current_time

            self.logger.info("Connection info refreshed and MatriceStream reinitialized successfully!")
            return True

        except Exception as e:
            self.logger.error(f"Error refreshing connection info: {e}", exc_info=True)
            return False
|
|
1017
|
+
|
|
1018
|
+
# ========================================================================
|
|
1019
|
+
# Private Methods - Main Streaming Loop
|
|
1020
|
+
# ========================================================================
|
|
1021
|
+
|
|
1022
|
+
def _stream_loop(
    self,
    source: Union[str, int],
    stream_key: str,
    stream_group_key: str,
    topic: str,
    fps: int,
    quality: int,
    width: int,
    height: int,
    simulate_video_file_stream: bool,
    camera_location: str
):
    """Main streaming loop with GStreamer pipeline.

    Structure: an outer reconnect loop builds a fresh pipeline per
    attempt; an inner loop pulls encoded frames, publishes them, and
    paces itself to the requested fps. End-of-stream handling depends on
    whether the source looks like a video file and whether looped
    playback (simulate_video_file_stream) is requested.

    Args:
        source: Capture source (device index, file path, or URI).
        stream_key / stream_group_key: Stream identity passed downstream.
        topic: Destination topic (must be registered before calling).
        fps: Target output frame rate; also drives the pacing sleep.
        quality: JPEG quality forwarded to the pipeline config.
        width / height: Output frame geometry.
        simulate_video_file_stream: If True, restart video files at EOS.
        camera_location: Human-readable location tag for metadata.
    """
    retry_mgr = RetryManager(stream_key)

    # Setup topic
    if not self.setup_stream_for_topic(topic):
        self.logger.error(f"Failed to setup topic {topic}")
        return

    # Detect source type for proper handling of video file end-of-stream.
    # NOTE(review): extension sniffing only — URIs / extensionless files
    # will be treated as live cameras.
    source_str = str(source)
    is_video_file = source_str.endswith(('.mp4', '.avi', '.mkv', '.mov', '.webm'))

    # Clone the shared config so the per-call JPEG quality does not
    # mutate self.gstreamer_config.
    config = GStreamerConfig(
        encoder=self.gstreamer_config.encoder,
        codec=self.gstreamer_config.codec,
        bitrate=self.gstreamer_config.bitrate,
        preset=self.gstreamer_config.preset,
        gpu_id=self.gstreamer_config.gpu_id,
        use_cuda_memory=self.gstreamer_config.use_cuda_memory,
        jpeg_quality=quality,
        gop_size=self.gstreamer_config.gop_size,
    )

    # OUTER LOOP: Retry forever
    while not self._stop_streaming:
        pipeline = None

        try:
            # Create pipeline
            pipeline = GStreamerPipeline(
                stream_key=stream_key,
                source=source,
                width=width,
                height=height,
                fps=fps,
                config=config,
            )

            pipeline.start()
            self.pipelines[stream_key] = pipeline
            retry_mgr.handle_successful_reconnect()

            # INNER LOOP: Process frames
            frame_interval = 1.0 / fps

            while not self._stop_streaming:
                loop_start = time.time()

                # Check for EOS (End-of-Stream) from GStreamer bus.
                # This happens when video files reach the end.
                bus = pipeline.pipeline.get_bus()
                if bus:
                    msg = bus.pop_filtered(Gst.MessageType.EOS)
                    if msg:
                        if is_video_file:
                            if simulate_video_file_stream:
                                self.logger.info(
                                    f"Video {stream_key} reached end, restarting (simulate_video_file_stream=True)"
                                )
                                break  # Break inner loop to restart in outer loop
                            else:
                                self.logger.info(
                                    f"Video {stream_key} playback complete (simulate_video_file_stream=False)"
                                )
                                # The finally block still runs on return,
                                # so the pipeline is cleaned up.
                                return
                        else:
                            # Camera EOS is unexpected - treat as error
                            self.logger.warning(
                                f"Unexpected EOS from camera {stream_key}, will reconnect"
                            )
                            break

                # Pull encoded frame
                read_start = time.time()
                result = pipeline.pull_frame()
                read_time = time.time() - read_start

                if result is None:
                    # No frame available: count the miss; reconnect if the
                    # retry manager says the pipeline looks dead.
                    retry_mgr.record_read_failure()
                    if retry_mgr.should_reconnect():
                        break
                    time.sleep(0.001)
                    continue

                frame_data, latency_ms, frame_size = result
                retry_mgr.record_success()

                # Build and send message
                self._process_and_send_frame(
                    frame_data, stream_key, stream_group_key, topic,
                    fps, quality, width, height, camera_location,
                    frame_size, latency_ms, read_time
                )

                # Periodic metrics logging
                self._maybe_log_metrics(stream_key)

                # Maintain FPS: sleep off whatever time the iteration left over.
                elapsed = time.time() - loop_start
                sleep_time = max(0, frame_interval - elapsed)
                if sleep_time > 0:
                    time.sleep(sleep_time)

        except Exception as e:
            retry_mgr.handle_connection_failure(e)

        finally:
            # Always tear down the pipeline and deregister it, whether we
            # exited via EOS, error, or stop request.
            if pipeline:
                pipeline.stop()
            if stream_key in self.pipelines:
                del self.pipelines[stream_key]

        # Determine retry behavior based on source type and simulation flag
        if not self._stop_streaming:
            if is_video_file and simulate_video_file_stream:
                # Video file with simulation enabled - restart immediately (no backoff)
                self.logger.info(f"Restarting video {stream_key} immediately for continuous simulation")
                time.sleep(0.1)  # Brief pause to allow cleanup
                continue
            elif is_video_file and not simulate_video_file_stream:
                # Video file without simulation - playback complete, exit cleanly
                self.logger.info(f"Video {stream_key} playback complete (simulation disabled)")
                break  # Exit outer loop
            else:
                # Camera or RTSP stream - apply exponential backoff for reconnection
                retry_mgr.wait_before_retry()

    self.logger.info(f"GStreamer stream ended for {stream_key}")
|
|
1164
|
+
|
|
1165
|
+
def _process_and_send_frame(
    self,
    frame_data: bytes,
    stream_key: str,
    stream_group_key: str,
    topic: str,
    fps: int,
    quality: int,
    width: int,
    height: int,
    camera_location: str,
    frame_size: int,
    latency_ms: float,
    read_time: float
):
    """Build message and send to stream.

    Performs exact-duplicate detection (MD5 of the encoded bytes): if the
    frame is byte-identical to the previous one, an empty "cached" message
    referencing the last sent frame_id is published instead of the payload.

    NOTE: GStreamer frame optimization limitation:
    - For h264/h265 encoders: Frames are already encoded in pipeline, so similarity
      detection would require decoding (CPU overhead negates benefit)
    - For JPEG encoder: Could decode and use FrameOptimizer, but JPEG is already
      per-frame compression with no inter-frame dependencies
    - Future enhancement: Add dual appsink (one pre-encoder for similarity check)
    """
    # Get timing
    last_read, last_write, last_process = self.statistics.get_timing(stream_key)
    input_order = self.statistics.get_next_input_order(stream_key)

    # Check frame similarity using hash-based detection for video codecs
    # (Only effective for truly identical frames, not similar frames like FrameOptimizer)
    is_similar = False
    reference_frame_id = self._last_sent_frame_ids.get(stream_key)

    # Simple hash-based similarity for identical frames (basic optimization)
    if self.frame_optimizer.enabled and reference_frame_id:
        # MD5 used purely as a fast fingerprint, not for security.
        import hashlib
        frame_hash = hashlib.md5(frame_data).hexdigest()
        # _last_frame_hashes is created lazily below on first mismatch.
        last_hash = getattr(self, '_last_frame_hashes', {}).get(stream_key)

        if last_hash == frame_hash:
            is_similar = True
        else:
            # Store new hash
            if not hasattr(self, '_last_frame_hashes'):
                self._last_frame_hashes = {}
            self._last_frame_hashes[stream_key] = frame_hash

    # Build metadata (use build_frame_metadata for consistency)
    # Since we don't have video_props, create minimal metadata inline
    # TODO: Future enhancement - call build_frame_metadata with proper video_props
    metadata = {
        "source": stream_key,
        "fps": fps,
        "quality": quality,
        "width": width,
        "height": height,
        "camera_location": camera_location,
        "feed_type": "camera",
        "frame_count": 1,
        "stream_unit": "frame",
        "encoder": "gstreamer",
        "codec": self.gstreamer_config.codec,
        "encoding_latency_ms": latency_ms,
    }

    # Determine codec string for message
    codec = "jpeg" if self.gstreamer_config.encoder == "jpeg" else self.gstreamer_config.codec

    # If frame is identical to previous, send cached frame reference
    if is_similar and reference_frame_id:
        metadata["similarity_score"] = 1.0  # Hash-based, so exact match
        codec = "cached"
        frame_data_to_send = b""  # Empty content for cached frame
    else:
        frame_data_to_send = frame_data

    try:
        message = self.message_builder.build_message(
            frame_data=frame_data_to_send,
            stream_key=stream_key,
            stream_group_key=stream_group_key,
            codec=codec,
            metadata=metadata,
            topic=topic,
            broker_config=self.matrice_stream.config.get('bootstrap_servers', 'localhost'),
            input_order=input_order,
            last_read_time=last_read,
            last_write_time=last_write,
            last_process_time=last_process,
            cached_frame_id=reference_frame_id if is_similar else None,
        )

        write_start = time.time()
        self.matrice_stream.add_message(
            topic_or_channel=topic,
            message=message,
            key=str(stream_key)
        )
        write_time = time.time() - write_start

        # Track frame_id for future cached references
        if not is_similar:
            new_frame_id = message.get("frame_id")
            if new_frame_id:
                self._last_sent_frame_ids[stream_key] = new_frame_id

        # Update statistics
        if is_similar:
            self.statistics.increment_frames_skipped()
            # For cached frames, frame_size is 0
            process_time = read_time + write_time
            self.statistics.update_timing(
                stream_key, read_time, write_time, process_time,
                0, 0  # No frame size or encoding time for cached
            )
        else:
            self.statistics.increment_frames_sent()
            process_time = read_time + write_time
            # Note: latency_ms is PTS-based pipeline latency, not pure encoding time
            # This is a known limitation - see Issue #15 in review
            self.statistics.update_timing(
                stream_key, read_time, write_time, process_time,
                frame_size, latency_ms / 1000
            )

    except Exception as e:
        self.logger.error(f"Failed to send frame for {stream_key}: {e}")
|
|
1292
|
+
|
|
1293
|
+
def _maybe_log_metrics(self, stream_key: str):
    """Log comprehensive metrics if interval has elapsed.

    Called once per frame from the streaming loop; the time gate keeps
    actual logging to every _metrics_log_interval seconds. Emits three
    things: the shared per-stream statistics, the pipeline's own metrics
    (fps, bandwidth, latency), and cached-frame optimization ratios.
    """
    current_time = time.time()
    if (current_time - self._last_metrics_log_time) >= self._metrics_log_interval:
        # Log detailed per-stream statistics
        self.statistics.log_detailed_stats(stream_key)

        # Log GStreamer-specific pipeline metrics
        if stream_key in self.pipelines:
            metrics = self.pipelines[stream_key].get_metrics()

            # Calculate statistics (latencies are in seconds → report ms)
            avg_latency = 0.0
            if metrics.latencies:
                avg_latency = sum(metrics.latencies) / len(metrics.latencies) * 1000

            bandwidth_mbps = 0.0
            if metrics.total_bytes > 0 and current_time - metrics.start_time > 0:
                duration = current_time - metrics.start_time
                bandwidth_mbps = (metrics.total_bytes * 8) / (duration * 1_000_000)

            fps = 0.0
            if metrics.frames_processed > 0 and current_time - metrics.start_time > 0:
                fps = metrics.frames_processed / (current_time - metrics.start_time)

            self.logger.info(
                f"GStreamer [{stream_key}] Pipeline Metrics: "
                f"encoder={self.gstreamer_config.encoder}, "
                f"frames={metrics.frames_processed}, "
                f"fps={fps:.1f}, "
                f"errors={metrics.errors}, "
                f"total_bytes={metrics.total_bytes / 1024 / 1024:.2f}MB, "
                f"bandwidth={bandwidth_mbps:.2f}Mbps, "
                f"avg_latency={avg_latency:.2f}ms"
            )

        # Log frame optimization metrics
        stats = self.statistics.get_transmission_stats(
            f"gstreamer-{self.gstreamer_config.encoder}",
            len(self.streaming_threads)
        )
        frames_sent = stats.get('frames_sent', 0)
        frames_skipped = stats.get('frames_skipped', 0)
        total_frames = frames_sent + frames_skipped

        if total_frames > 0:
            cache_efficiency = (frames_skipped / total_frames) * 100
            # NOTE(review): bandwidth_saved is reported as the same value
            # as cache_efficiency (cached frames carry empty payloads);
            # confirm whether a byte-based figure was intended.
            self.logger.info(
                f"GStreamer [{stream_key}] Frame Optimization: "
                f"sent={frames_sent}, "
                f"cached={frames_skipped}, "
                f"cache_efficiency={cache_efficiency:.1f}%, "
                f"bandwidth_saved={(cache_efficiency):.1f}%"
            )

        self._last_metrics_log_time = current_time
|
|
1349
|
+
|
|
1350
|
+
# ========================================================================
|
|
1351
|
+
# Cleanup
|
|
1352
|
+
# ========================================================================
|
|
1353
|
+
|
|
1354
|
+
async def close(self):
    """Clean up resources.

    Stops all streaming threads and pipelines, then closes both the
    async and sync sides of the MatriceStream client.

    NOTE(review): calling both async_close() and close() assumes the
    client tolerates being shut down twice — confirm against the
    MatriceStream implementation.
    """
    try:
        self.stop_streaming()
        await self.matrice_stream.async_close()
        self.matrice_stream.close()
        self.logger.info("GStreamerCameraStreamer closed")
    except Exception as e:
        self.logger.error(f"Error closing GStreamerCameraStreamer: {e}")
|
|
1363
|
+
|
|
1364
|
+
|
|
1365
|
+
def is_gstreamer_available() -> bool:
    """Check if GStreamer is available on the system.

    Returns the module-level GST_AVAILABLE flag — presumably set at
    import time based on whether the GStreamer Python bindings could be
    loaded (TODO: confirm where the flag is assigned).
    """
    return GST_AVAILABLE
|
|
1368
|
+
|