matrice-streaming 0.1.14__py3-none-any.whl → 0.1.65__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- matrice_streaming/__init__.py +44 -32
- matrice_streaming/streaming_gateway/camera_streamer/__init__.py +68 -1
- matrice_streaming/streaming_gateway/camera_streamer/async_camera_worker.py +1388 -0
- matrice_streaming/streaming_gateway/camera_streamer/async_ffmpeg_worker.py +966 -0
- matrice_streaming/streaming_gateway/camera_streamer/camera_streamer.py +188 -24
- matrice_streaming/streaming_gateway/camera_streamer/device_detection.py +507 -0
- matrice_streaming/streaming_gateway/camera_streamer/encoding_pool_manager.py +136 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_camera_streamer.py +1048 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_config.py +192 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_worker_manager.py +470 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_camera_streamer.py +1368 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_worker.py +1063 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_worker_manager.py +546 -0
- matrice_streaming/streaming_gateway/camera_streamer/message_builder.py +60 -15
- matrice_streaming/streaming_gateway/camera_streamer/nvdec.py +1330 -0
- matrice_streaming/streaming_gateway/camera_streamer/nvdec_worker_manager.py +412 -0
- matrice_streaming/streaming_gateway/camera_streamer/platform_pipelines.py +680 -0
- matrice_streaming/streaming_gateway/camera_streamer/stream_statistics.py +111 -4
- matrice_streaming/streaming_gateway/camera_streamer/video_capture_manager.py +223 -27
- matrice_streaming/streaming_gateway/camera_streamer/worker_manager.py +694 -0
- matrice_streaming/streaming_gateway/debug/__init__.py +27 -2
- matrice_streaming/streaming_gateway/debug/benchmark.py +727 -0
- matrice_streaming/streaming_gateway/debug/debug_gstreamer_gateway.py +599 -0
- matrice_streaming/streaming_gateway/debug/debug_streaming_gateway.py +245 -95
- matrice_streaming/streaming_gateway/debug/debug_utils.py +29 -0
- matrice_streaming/streaming_gateway/debug/test_videoplayback.py +318 -0
- matrice_streaming/streaming_gateway/dynamic_camera_manager.py +656 -39
- matrice_streaming/streaming_gateway/metrics_reporter.py +676 -139
- matrice_streaming/streaming_gateway/streaming_action.py +71 -20
- matrice_streaming/streaming_gateway/streaming_gateway.py +1026 -78
- matrice_streaming/streaming_gateway/streaming_gateway_utils.py +175 -20
- matrice_streaming/streaming_gateway/streaming_status_listener.py +89 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/METADATA +1 -1
- matrice_streaming-0.1.65.dist-info/RECORD +56 -0
- matrice_streaming-0.1.14.dist-info/RECORD +0 -38
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/WHEEL +0 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/licenses/LICENSE.txt +0 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,727 @@
"""Comprehensive GStreamer benchmark suite for all streaming methods.

Tests and benchmarks:
1. CameraStreamer (standard OpenCV-based)
2. AsyncWorkers (multi-process OpenCV)
3. GStreamerCameraStreamer (single-process GStreamer)
4. GStreamerWorkerManager (multi-process GStreamer)

Measures:
- FPS (frames per second)
- Latency (read, encode, write, total)
- Bandwidth (Mbps)
- CPU usage
- Memory usage
- Cache efficiency
- GPU utilization (for NVENC)
"""

import logging
import time
import psutil
import statistics
import json
from typing import List, Optional, Dict, Any, Tuple
from pathlib import Path
from dataclasses import dataclass, asdict
import threading

from .debug_streaming_gateway import DebugStreamingGateway
from .debug_gstreamer_gateway import DebugGStreamerGateway


@dataclass
class BenchmarkResult:
    """Benchmark result for a single test."""
    method: str
    encoder: str
    codec: str
    num_streams: int
    fps_target: int
    duration_seconds: float

    # Performance metrics
    avg_fps: float
    min_fps: float
    max_fps: float
    fps_std: float

    # Latency metrics (milliseconds)
    avg_latency_ms: float
    p50_latency_ms: float
    p95_latency_ms: float
    p99_latency_ms: float

    # Throughput metrics
    total_frames: int
    frames_sent: int
    frames_skipped: int
    total_bytes: int
    bandwidth_mbps: float

    # Cache/optimization metrics
    cache_efficiency_pct: float = 0.0
    similarity_rate_pct: float = 0.0

    # Resource utilization
    avg_cpu_percent: float = 0.0
    peak_cpu_percent: float = 0.0
    avg_memory_mb: float = 0.0
    peak_memory_mb: float = 0.0

    # Success/failure
    success: bool = True
    error_message: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary."""
        return asdict(self)

    def to_json(self, indent: int = 2) -> str:
        """Convert to JSON string."""
        return json.dumps(self.to_dict(), indent=indent)


class ResourceMonitor:
    """Monitor CPU and memory usage during benchmark."""

    def __init__(self, process: psutil.Process):
        """Initialize monitor.

        Args:
            process: Process to monitor
        """
        self.process = process
        self.cpu_samples = []
        self.memory_samples = []
        self.running = False
        self.thread = None
        self.sample_interval = 0.5  # seconds

    def start(self):
        """Start monitoring."""
        self.running = True
        self.cpu_samples = []
        self.memory_samples = []
        self.thread = threading.Thread(target=self._monitor_loop, daemon=True)
        self.thread.start()

    def stop(self):
        """Stop monitoring."""
        self.running = False
        if self.thread:
            self.thread.join(timeout=2.0)

    def _monitor_loop(self):
        """Monitor loop."""
        while self.running:
            try:
                # Get CPU percent (non-blocking)
                cpu_percent = self.process.cpu_percent()

                # Get memory usage in MB
                memory_info = self.process.memory_info()
                memory_mb = memory_info.rss / (1024 * 1024)

                self.cpu_samples.append(cpu_percent)
                self.memory_samples.append(memory_mb)

            except (psutil.NoSuchProcess, psutil.AccessDenied):
                pass

            time.sleep(self.sample_interval)

    def get_stats(self) -> Dict[str, float]:
        """Get resource usage statistics.

        Returns:
            Dictionary with CPU and memory stats
        """
        stats = {
            "avg_cpu_percent": 0.0,
            "peak_cpu_percent": 0.0,
            "avg_memory_mb": 0.0,
            "peak_memory_mb": 0.0,
        }

        if self.cpu_samples:
            stats["avg_cpu_percent"] = statistics.mean(self.cpu_samples)
            stats["peak_cpu_percent"] = max(self.cpu_samples)

        if self.memory_samples:
            stats["avg_memory_mb"] = statistics.mean(self.memory_samples)
            stats["peak_memory_mb"] = max(self.memory_samples)

        return stats


class GStreamerBenchmark:
    """Comprehensive benchmark suite for all streaming methods."""

    def __init__(
        self,
        video_paths: List[str],
        output_dir: Optional[str] = None,
        log_level: int = logging.INFO,
    ):
        """Initialize benchmark suite.

        Args:
            video_paths: List of video files to use for testing
            output_dir: Directory to save benchmark results
            log_level: Logging level
        """
        # Validate video paths
        for video_path in video_paths:
            if not Path(video_path).exists():
                raise FileNotFoundError(f"Video file not found: {video_path}")

        self.video_paths = video_paths
        self.output_dir = Path(output_dir) if output_dir else Path("benchmark_results")
        self.output_dir.mkdir(parents=True, exist_ok=True)

        # Setup logging
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(log_level)

        # Results storage
        self.results: List[BenchmarkResult] = []

        self.logger.info(f"GStreamerBenchmark initialized with {len(video_paths)} videos")

    def benchmark_standard_camera_streamer(
        self,
        fps: int = 30,
        duration: float = 60.0,
        codec: str = "h264",
    ) -> BenchmarkResult:
        """Benchmark standard CameraStreamer (OpenCV + OpenH264/x264).

        Args:
            fps: Target FPS
            duration: Test duration in seconds
            codec: Video codec (h264, h265-frame, h265-chunk)

        Returns:
            BenchmarkResult with performance metrics
        """
        self.logger.info(f"Benchmarking CameraStreamer: fps={fps}, codec={codec}, duration={duration}s")

        start_time = time.time()
        gateway = None
        resource_monitor = None

        try:
            # Create gateway
            gateway = DebugStreamingGateway(
                video_paths=self.video_paths,
                fps=fps,
                video_codec=codec,
                loop_videos=True,
                save_to_files=False,
                log_messages=False,
            )

            # Start resource monitoring
            process = psutil.Process()
            resource_monitor = ResourceMonitor(process)
            resource_monitor.start()

            # Start streaming
            if not gateway.start_streaming():
                raise RuntimeError("Failed to start streaming")

            # Run for duration
            time.sleep(duration)

            # Get statistics
            stats = gateway.get_statistics()

            # Stop streaming
            gateway.stop_streaming()

            # Stop resource monitoring
            resource_monitor.stop()
            resource_stats = resource_monitor.get_stats()

            # Extract metrics
            transmission_stats = stats.get("transmission_stats", {})
            timing_stats = stats.get("timing_stats", {})

            total_frames = transmission_stats.get("total_frames_sent", 0)
            total_bytes = transmission_stats.get("total_bytes_sent", 0)
            actual_duration = time.time() - start_time

            # Calculate FPS metrics
            avg_fps = total_frames / actual_duration if actual_duration > 0 else 0

            # Calculate latency metrics from timing stats
            latencies = []
            for stream_timing in timing_stats.values():
                if isinstance(stream_timing, dict):
                    process_time = stream_timing.get("avg_process_time", 0)
                    latencies.append(process_time * 1000)  # Convert to ms

            avg_latency = statistics.mean(latencies) if latencies else 0
            p50_latency = statistics.median(latencies) if latencies else 0
            p95_latency = statistics.quantiles(latencies, n=20)[18] if len(latencies) >= 20 else avg_latency
            p99_latency = statistics.quantiles(latencies, n=100)[98] if len(latencies) >= 100 else avg_latency

            # Calculate bandwidth
            bandwidth_mbps = (total_bytes * 8) / (actual_duration * 1_000_000) if actual_duration > 0 else 0

            result = BenchmarkResult(
                method="CameraStreamer",
                encoder="opencv",
                codec=codec,
                num_streams=len(self.video_paths),
                fps_target=fps,
                duration_seconds=actual_duration,
                avg_fps=avg_fps,
                min_fps=avg_fps,  # Not tracked per-frame
                max_fps=avg_fps,
                fps_std=0.0,
                avg_latency_ms=avg_latency,
                p50_latency_ms=p50_latency,
                p95_latency_ms=p95_latency,
                p99_latency_ms=p99_latency,
                total_frames=total_frames,
                frames_sent=total_frames,
                frames_skipped=0,
                total_bytes=total_bytes,
                bandwidth_mbps=bandwidth_mbps,
                avg_cpu_percent=resource_stats["avg_cpu_percent"],
                peak_cpu_percent=resource_stats["peak_cpu_percent"],
                avg_memory_mb=resource_stats["avg_memory_mb"],
                peak_memory_mb=resource_stats["peak_memory_mb"],
                success=True,
            )

            self.results.append(result)
            self.logger.info(f"CameraStreamer benchmark complete: {avg_fps:.1f} fps, {bandwidth_mbps:.2f} Mbps")
            return result

        except Exception as e:
            self.logger.error(f"CameraStreamer benchmark failed: {e}", exc_info=True)

            result = BenchmarkResult(
                method="CameraStreamer",
                encoder="opencv",
                codec=codec,
                num_streams=len(self.video_paths),
                fps_target=fps,
                duration_seconds=time.time() - start_time,
                avg_fps=0.0,
                min_fps=0.0,
                max_fps=0.0,
                fps_std=0.0,
                avg_latency_ms=0.0,
                p50_latency_ms=0.0,
                p95_latency_ms=0.0,
                p99_latency_ms=0.0,
                total_frames=0,
                frames_sent=0,
                frames_skipped=0,
                total_bytes=0,
                bandwidth_mbps=0.0,
                success=False,
                error_message=str(e),
            )

            self.results.append(result)
            return result

        finally:
            if gateway:
                try:
                    gateway.stop_streaming()
                except:
                    pass
            if resource_monitor:
                try:
                    resource_monitor.stop()
                except:
                    pass

    def benchmark_gstreamer_camera_streamer(
        self,
        fps: int = 30,
        duration: float = 60.0,
        encoder: str = "jpeg",
        codec: str = "h264",
        jpeg_quality: int = 85,
        enable_frame_optimizer: bool = True,
    ) -> BenchmarkResult:
        """Benchmark GStreamerCameraStreamer (single-process GStreamer).

        Args:
            fps: Target FPS
            duration: Test duration in seconds
            encoder: GStreamer encoder (jpeg, nvenc, x264, openh264, auto)
            codec: Codec for hardware/software encoders (h264, h265)
            jpeg_quality: JPEG quality (1-100)
            enable_frame_optimizer: Enable frame similarity detection

        Returns:
            BenchmarkResult with performance metrics
        """
        self.logger.info(
            f"Benchmarking GStreamerCameraStreamer: fps={fps}, encoder={encoder}, "
            f"codec={codec}, duration={duration}s"
        )

        start_time = time.time()
        gateway = None
        resource_monitor = None

        try:
            # Create gateway
            gateway = DebugGStreamerGateway(
                video_paths=self.video_paths,
                fps=fps,
                loop_videos=True,
                save_to_files=False,
                log_messages=False,
                gstreamer_encoder=encoder,
                gstreamer_codec=codec,
                jpeg_quality=jpeg_quality,
                enable_frame_optimizer=enable_frame_optimizer,
            )

            # Start resource monitoring
            process = psutil.Process()
            resource_monitor = ResourceMonitor(process)
            resource_monitor.start()

            # Start streaming
            if not gateway.start_streaming():
                raise RuntimeError("Failed to start streaming")

            # Run for duration
            time.sleep(duration)

            # Get statistics
            stats = gateway.get_statistics()

            # Stop streaming
            gateway.stop_streaming()

            # Stop resource monitoring
            resource_monitor.stop()
            resource_stats = resource_monitor.get_stats()

            # Extract metrics
            transmission_stats = stats.get("transmission_stats", {})
            timing_stats = stats.get("timing_stats", {})
            frame_opt_metrics = stats.get("frame_optimizer_metrics", {})

            total_frames = transmission_stats.get("total_frames_sent", 0)
            total_bytes = transmission_stats.get("total_bytes_sent", 0)
            actual_duration = time.time() - start_time

            # Calculate FPS metrics
            avg_fps = stats.get("avg_fps", 0.0)

            # Calculate latency metrics from timing stats
            latencies = []
            for stream_timing in timing_stats.values():
                if isinstance(stream_timing, dict):
                    process_time = stream_timing.get("avg_process_time", 0)
                    latencies.append(process_time * 1000)  # Convert to ms

            avg_latency = statistics.mean(latencies) if latencies else 0
            p50_latency = statistics.median(latencies) if latencies else 0
            p95_latency = statistics.quantiles(latencies, n=20)[18] if len(latencies) >= 20 else avg_latency
            p99_latency = statistics.quantiles(latencies, n=100)[98] if len(latencies) >= 100 else avg_latency

            # Calculate bandwidth
            bandwidth_mbps = stats.get("bandwidth_mbps", 0.0)

            # Cache efficiency
            cache_efficiency = stats.get("cache_efficiency", 0.0)
            similarity_rate = frame_opt_metrics.get("similarity_rate", 0.0) if frame_opt_metrics else 0.0

            result = BenchmarkResult(
                method="GStreamerCameraStreamer",
                encoder=encoder,
                codec=codec,
                num_streams=len(self.video_paths),
                fps_target=fps,
                duration_seconds=actual_duration,
                avg_fps=avg_fps,
                min_fps=avg_fps,  # Not tracked per-frame
                max_fps=avg_fps,
                fps_std=0.0,
                avg_latency_ms=avg_latency,
                p50_latency_ms=p50_latency,
                p95_latency_ms=p95_latency,
                p99_latency_ms=p99_latency,
                total_frames=total_frames,
                frames_sent=total_frames,
                frames_skipped=0,
                total_bytes=total_bytes,
                bandwidth_mbps=bandwidth_mbps,
                cache_efficiency_pct=cache_efficiency,
                similarity_rate_pct=similarity_rate,
                avg_cpu_percent=resource_stats["avg_cpu_percent"],
                peak_cpu_percent=resource_stats["peak_cpu_percent"],
                avg_memory_mb=resource_stats["avg_memory_mb"],
                peak_memory_mb=resource_stats["peak_memory_mb"],
                success=True,
            )

            self.results.append(result)
            self.logger.info(
                f"GStreamerCameraStreamer benchmark complete: {avg_fps:.1f} fps, "
                f"{bandwidth_mbps:.2f} Mbps, cache={cache_efficiency:.1f}%"
            )
            return result

        except Exception as e:
            self.logger.error(f"GStreamerCameraStreamer benchmark failed: {e}", exc_info=True)

            result = BenchmarkResult(
                method="GStreamerCameraStreamer",
                encoder=encoder,
                codec=codec,
                num_streams=len(self.video_paths),
                fps_target=fps,
                duration_seconds=time.time() - start_time,
                avg_fps=0.0,
                min_fps=0.0,
                max_fps=0.0,
                fps_std=0.0,
                avg_latency_ms=0.0,
                p50_latency_ms=0.0,
                p95_latency_ms=0.0,
                p99_latency_ms=0.0,
                total_frames=0,
                frames_sent=0,
                frames_skipped=0,
                total_bytes=0,
                bandwidth_mbps=0.0,
                success=False,
                error_message=str(e),
            )

            self.results.append(result)
            return result

        finally:
            if gateway:
                try:
                    gateway.stop_streaming()
                except:
                    pass
            if resource_monitor:
                try:
                    resource_monitor.stop()
                except:
                    pass

    def run_comprehensive_benchmark(
        self,
        fps: int = 30,
        duration: float = 60.0,
        test_encoders: List[str] = None,
    ) -> Dict[str, Any]:
        """Run comprehensive benchmark across all methods and encoders.

        Args:
            fps: Target FPS
            duration: Test duration per benchmark
            test_encoders: List of encoders to test (default: jpeg, nvenc, x264)

        Returns:
            Dictionary with all benchmark results and comparisons
        """
        if test_encoders is None:
            test_encoders = ["jpeg", "nvenc", "x264", "openh264"]

        self.logger.info(f"Starting comprehensive benchmark: {len(test_encoders)} encoders x {duration}s each")

        # Test 1: Standard CameraStreamer with H.264
        self.logger.info("\n" + "="*80)
        self.logger.info("TEST 1: CameraStreamer (OpenCV + OpenH264)")
        self.logger.info("="*80)
        self.benchmark_standard_camera_streamer(fps=fps, duration=duration, codec="h264")

        # Test 2-N: GStreamerCameraStreamer with different encoders
        for encoder in test_encoders:
            self.logger.info("\n" + "="*80)
            self.logger.info(f"TEST: GStreamerCameraStreamer ({encoder})")
            self.logger.info("="*80)

            # Determine codec based on encoder
            codec = "h264"  # Default for most encoders
            jpeg_quality = 85 if encoder == "jpeg" else None

            self.benchmark_gstreamer_camera_streamer(
                fps=fps,
                duration=duration,
                encoder=encoder,
                codec=codec,
                jpeg_quality=jpeg_quality if jpeg_quality else 85,
                enable_frame_optimizer=True,
            )

        # Generate comparison report
        report = self._generate_comparison_report()

        # Save results
        self._save_results(report)

        return report

    def _generate_comparison_report(self) -> Dict[str, Any]:
        """Generate comparison report from all benchmark results.

        Returns:
            Dictionary with comparison metrics and analysis
        """
        if not self.results:
            return {"error": "No benchmark results available"}

        report = {
            "summary": {
                "total_tests": len(self.results),
                "successful_tests": sum(1 for r in self.results if r.success),
                "failed_tests": sum(1 for r in self.results if not r.success),
            },
            "results": [r.to_dict() for r in self.results],
            "comparisons": {},
        }

        # Find best performers
        successful_results = [r for r in self.results if r.success]

        if successful_results:
            # Best FPS
            best_fps = max(successful_results, key=lambda r: r.avg_fps)
            report["comparisons"]["best_fps"] = {
                "method": best_fps.method,
                "encoder": best_fps.encoder,
                "fps": best_fps.avg_fps,
            }

            # Best latency
            best_latency = min(successful_results, key=lambda r: r.avg_latency_ms)
            report["comparisons"]["best_latency"] = {
                "method": best_latency.method,
                "encoder": best_latency.encoder,
                "latency_ms": best_latency.avg_latency_ms,
            }

            # Best bandwidth efficiency (highest FPS per Mbps)
            if any(r.bandwidth_mbps > 0 for r in successful_results):
                best_efficiency = max(
                    (r for r in successful_results if r.bandwidth_mbps > 0),
                    key=lambda r: r.avg_fps / r.bandwidth_mbps
                )
                report["comparisons"]["best_bandwidth_efficiency"] = {
                    "method": best_efficiency.method,
                    "encoder": best_efficiency.encoder,
                    "fps_per_mbps": best_efficiency.avg_fps / best_efficiency.bandwidth_mbps,
                }

            # Best cache efficiency
            if any(r.cache_efficiency_pct > 0 for r in successful_results):
                best_cache = max(successful_results, key=lambda r: r.cache_efficiency_pct)
                report["comparisons"]["best_cache_efficiency"] = {
                    "method": best_cache.method,
                    "encoder": best_cache.encoder,
                    "cache_efficiency_pct": best_cache.cache_efficiency_pct,
                }

            # Lowest CPU usage
            best_cpu = min(successful_results, key=lambda r: r.avg_cpu_percent)
            report["comparisons"]["lowest_cpu_usage"] = {
                "method": best_cpu.method,
                "encoder": best_cpu.encoder,
                "cpu_percent": best_cpu.avg_cpu_percent,
            }

        return report

    def _save_results(self, report: Dict[str, Any]):
        """Save benchmark results to file.

        Args:
            report: Comparison report dictionary
        """
        # Save JSON
        json_file = self.output_dir / f"benchmark_{int(time.time())}.json"
        with open(json_file, "w") as f:
            json.dump(report, f, indent=2)

        self.logger.info(f"Benchmark results saved to: {json_file}")

        # Save human-readable summary
        summary_file = self.output_dir / f"benchmark_{int(time.time())}_summary.txt"
        with open(summary_file, "w") as f:
            f.write("="*80 + "\n")
            f.write("GStreamer Benchmark Summary\n")
            f.write("="*80 + "\n\n")

            f.write(f"Total Tests: {report['summary']['total_tests']}\n")
            f.write(f"Successful: {report['summary']['successful_tests']}\n")
            f.write(f"Failed: {report['summary']['failed_tests']}\n\n")

            f.write("="*80 + "\n")
            f.write("Best Performers\n")
            f.write("="*80 + "\n\n")

            for metric, data in report.get("comparisons", {}).items():
                f.write(f"{metric.replace('_', ' ').title()}:\n")
                for key, value in data.items():
                    f.write(f"  {key}: {value}\n")
                f.write("\n")

            f.write("="*80 + "\n")
            f.write("Detailed Results\n")
            f.write("="*80 + "\n\n")

            for result in self.results:
                f.write(f"Method: {result.method} ({result.encoder}/{result.codec})\n")
                f.write(f"  FPS: {result.avg_fps:.1f} (target: {result.fps_target})\n")
                f.write(f"  Latency: {result.avg_latency_ms:.1f}ms (p95: {result.p95_latency_ms:.1f}ms)\n")
                f.write(f"  Bandwidth: {result.bandwidth_mbps:.2f} Mbps\n")
                f.write(f"  Frames: {result.total_frames} ({result.frames_sent} sent, {result.frames_skipped} skipped)\n")
                f.write(f"  Cache: {result.cache_efficiency_pct:.1f}%\n")
                f.write(f"  CPU: {result.avg_cpu_percent:.1f}% (peak: {result.peak_cpu_percent:.1f}%)\n")
                f.write(f"  Memory: {result.avg_memory_mb:.1f} MB (peak: {result.peak_memory_mb:.1f} MB)\n")
                f.write(f"  Success: {result.success}\n")
                if result.error_message:
                    f.write(f"    Error: {result.error_message}\n")
                f.write("\n")

        self.logger.info(f"Benchmark summary saved to: {summary_file}")

    def print_summary(self):
        """Print benchmark summary to console."""
        if not self.results:
            print("No benchmark results available")
            return

        print("\n" + "="*80)
        print("GStreamer Benchmark Summary")
        print("="*80 + "\n")

        # Print table header
        print(f"{'Method':<30} {'Encoder':<10} {'FPS':>8} {'Latency':>10} {'BW (Mbps)':>12} {'Cache %':>8} {'CPU %':>8}")
        print("-"*80)

        # Print results
        for result in self.results:
            method_str = f"{result.method}"
            print(
                f"{method_str:<30} "
                f"{result.encoder:<10} "
                f"{result.avg_fps:>8.1f} "
                f"{result.avg_latency_ms:>10.1f} "
                f"{result.bandwidth_mbps:>12.2f} "
                f"{result.cache_efficiency_pct:>8.1f} "
                f"{result.avg_cpu_percent:>8.1f}"
            )

        print("="*80 + "\n")
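Taken together, the new harness is driven through GStreamerBenchmark: construct it with a set of test videos, run the comprehensive benchmark, then read the comparison report it returns and saves. A minimal usage sketch, assuming the module is importable from the installed wheel as matrice_streaming.streaming_gateway.debug.benchmark and that the video paths (placeholders below) point at real files, since the constructor rejects missing ones:

import logging

from matrice_streaming.streaming_gateway.debug.benchmark import GStreamerBenchmark

logging.basicConfig(level=logging.INFO)

# Placeholder inputs: GStreamerBenchmark raises FileNotFoundError for missing files.
benchmark = GStreamerBenchmark(
    video_paths=["sample_1080p.mp4", "sample_720p.mp4"],
    output_dir="benchmark_results",
)

# Runs the CameraStreamer baseline plus one GStreamer pass per encoder,
# 30 seconds each; results accumulate in benchmark.results and are saved
# as benchmark_<timestamp>.json plus a _summary.txt under benchmark_results/.
report = benchmark.run_comprehensive_benchmark(
    fps=30,
    duration=30.0,
    test_encoders=["jpeg", "x264"],  # subset of the defaults; nvenc/openh264 need the matching GStreamer plugins
)

benchmark.print_summary()
print(report["comparisons"].get("best_fps"))

print_summary writes the per-method table (FPS, latency, bandwidth, cache and CPU figures) to stdout, while the report dictionary carries the same BenchmarkResult records plus the "best performer" comparisons built in _generate_comparison_report.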