pixelflux 1.4.1.tar.gz → 1.4.3.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pixelflux has been flagged as possibly problematic by the registry.
- {pixelflux-1.4.1/pixelflux.egg-info → pixelflux-1.4.3}/PKG-INFO +1 -1
- {pixelflux-1.4.1 → pixelflux-1.4.3}/pixelflux/screen_capture_module.cpp +197 -451
- {pixelflux-1.4.1 → pixelflux-1.4.3/pixelflux.egg-info}/PKG-INFO +1 -1
- {pixelflux-1.4.1 → pixelflux-1.4.3}/pyproject.toml +24 -3
- {pixelflux-1.4.1 → pixelflux-1.4.3}/setup.py +9 -14
- {pixelflux-1.4.1 → pixelflux-1.4.3}/LICENSE +0 -0
- {pixelflux-1.4.1 → pixelflux-1.4.3}/MANIFEST.in +0 -0
- {pixelflux-1.4.1 → pixelflux-1.4.3}/README.md +0 -0
- {pixelflux-1.4.1 → pixelflux-1.4.3}/pixelflux/__init__.py +0 -0
- {pixelflux-1.4.1 → pixelflux-1.4.3}/pixelflux/__pycache__/__init__.cpython-311.pyc +0 -0
- {pixelflux-1.4.1 → pixelflux-1.4.3}/pixelflux/include/cuda.h +0 -0
- {pixelflux-1.4.1 → pixelflux-1.4.3}/pixelflux/include/nvEncodeAPI.h +0 -0
- {pixelflux-1.4.1 → pixelflux-1.4.3}/pixelflux/include/stb_image.h +0 -0
- {pixelflux-1.4.1 → pixelflux-1.4.3}/pixelflux/include/xxhash.c +0 -0
- {pixelflux-1.4.1 → pixelflux-1.4.3}/pixelflux/include/xxhash.h +0 -0
- {pixelflux-1.4.1 → pixelflux-1.4.3}/pixelflux.egg-info/SOURCES.txt +0 -0
- {pixelflux-1.4.1 → pixelflux-1.4.3}/pixelflux.egg-info/dependency_links.txt +0 -0
- {pixelflux-1.4.1 → pixelflux-1.4.3}/pixelflux.egg-info/top_level.txt +0 -0
- {pixelflux-1.4.1 → pixelflux-1.4.3}/setup.cfg +0 -0
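The bulk of this release is a rewrite of the VA-API H.264 path in screen_capture_module.cpp: the hand-rolled libva plumbing (the dlopen-based VaapiFunctions table, LoadVaapiApi, and the manually built SPS/PPS/slice parameter buffers) is removed and replaced with FFmpeg's h264_vaapi encoder driven through libavcodec/libavutil, and the VA-API path gains YUV 4:4:4 support. The build changes follow: the cibuildwheel image now builds FFmpeg n8.0 from source, setup.py links avcodec and avutil, and auditwheel is told not to bundle the system libva/libdrm libraries. The sketch below is a minimal, hedged illustration of the libavcodec setup sequence the new initialize_vaapi_encoder in this diff follows; it is not code from the package, and the render-node path "/dev/dri/renderD128" is an assumed example value.

// Minimal sketch (not pixelflux code): open FFmpeg's h264_vaapi encoder on a
// DRM render node, mirroring the device-context -> frames-context -> open flow
// shown in the new initialize_vaapi_encoder.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>
#include <libavutil/opt.h>
}
#include <stdexcept>

static AVCodecContext* open_vaapi_h264(int width, int height, int fps, int qp) {
  const AVCodec* codec = avcodec_find_encoder_by_name("h264_vaapi");
  if (!codec) throw std::runtime_error("h264_vaapi encoder not available");

  AVBufferRef* device = nullptr;  // VA-API device context bound to one GPU node
  if (av_hwdevice_ctx_create(&device, AV_HWDEVICE_TYPE_VAAPI,
                             "/dev/dri/renderD128", nullptr, 0) < 0)
    throw std::runtime_error("av_hwdevice_ctx_create failed");

  AVBufferRef* frames = av_hwframe_ctx_alloc(device);  // pool of GPU surfaces
  if (!frames) throw std::runtime_error("av_hwframe_ctx_alloc failed");
  auto* fctx = reinterpret_cast<AVHWFramesContext*>(frames->data);
  fctx->format = AV_PIX_FMT_VAAPI;      // frames live on the GPU
  fctx->sw_format = AV_PIX_FMT_NV12;    // CPU-side layout uploaded into them
  fctx->width = width;
  fctx->height = height;
  fctx->initial_pool_size = 20;
  if (av_hwframe_ctx_init(frames) < 0)
    throw std::runtime_error("av_hwframe_ctx_init failed");

  AVCodecContext* enc = avcodec_alloc_context3(codec);
  enc->width = width;
  enc->height = height;
  enc->time_base = {1, fps};
  enc->framerate = {fps, 1};
  enc->pix_fmt = AV_PIX_FMT_VAAPI;
  enc->hw_frames_ctx = av_buffer_ref(frames);
  av_opt_set(enc->priv_data, "rc_mode", "CQP", 0);   // constant-QP rate control
  av_opt_set_int(enc->priv_data, "qp", qp, 0);
  if (avcodec_open2(enc, codec, nullptr) < 0)
    throw std::runtime_error("avcodec_open2 failed");

  av_buffer_unref(&frames);
  av_buffer_unref(&device);  // the codec context keeps its own references
  return enc;
}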
{pixelflux-1.4.1/pixelflux.egg-info → pixelflux-1.4.3}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pixelflux
-Version: 1.4.1
+Version: 1.4.3
 Summary: A performant web native pixel delivery pipeline for diverse sources, blending VNC-inspired parallel processing of pixel buffers with flexible modern encoding formats.
 Home-page: https://github.com/linuxserver/pixelflux
 Author: Linuxserver.io

{pixelflux-1.4.1 → pixelflux-1.4.3}/pixelflux/screen_capture_module.cpp

@@ -61,6 +61,12 @@
 #define STB_IMAGE_IMPLEMENTATION
 #endif
 #include "stb_image.h"
+extern "C" {
+#include <libavcodec/avcodec.h>
+#include <libavutil/hwcontext.h>
+#include <libavutil/opt.h>
+#include <libavutil/pixdesc.h>
+}
 
 typedef enum CUresult_enum { CUDA_SUCCESS = 0 } CUresult;
 typedef int CUdevice;

@@ -121,64 +127,27 @@ typedef NVENCSTATUS(NVENCAPI* PFN_NvEncodeAPICreateInstance)(
     NV_ENCODE_API_FUNCTION_LIST*);
 
 /**
- * @brief
- * This struct
- *
- *
-
-struct VaapiFunctions {
-  void *va_lib_handle = nullptr;
-  void *va_x11_lib_handle = nullptr;
-  void *va_drm_lib_handle = nullptr;
-  VADisplay (*vaGetDisplay)(Display*) = nullptr;
-  VADisplay (*vaGetDisplayDRM)(int) = nullptr;
-  VAStatus (*vaInitialize)(VADisplay, int*, int*) = nullptr;
-  VAStatus (*vaTerminate)(VADisplay) = nullptr;
-  const char * (*vaQueryVendorString)(VADisplay) = nullptr;
-  VAStatus (*vaCreateConfig)(VADisplay, VAProfile, VAEntrypoint, VAConfigAttrib*, int, VAConfigID*) = nullptr;
-  VAStatus (*vaDestroyConfig)(VADisplay, VAConfigID) = nullptr;
-  VAStatus (*vaCreateSurfaces)(VADisplay, unsigned int, unsigned int, unsigned int, VASurfaceID*, unsigned int, VASurfaceAttrib*, unsigned int) = nullptr;
-  VAStatus (*vaDestroySurfaces)(VADisplay, VASurfaceID*, int) = nullptr;
-  VAStatus (*vaCreateContext)(VADisplay, VAConfigID, int, int, int, VASurfaceID*, int, VAContextID*) = nullptr;
-  VAStatus (*vaDestroyContext)(VADisplay, VAContextID) = nullptr;
-  VAStatus (*vaCreateBuffer)(VADisplay, VAContextID, VABufferType, unsigned int, unsigned int, void*, VABufferID*) = nullptr;
-  VAStatus (*vaDestroyBuffer)(VADisplay, VABufferID) = nullptr;
-  VAStatus (*vaBeginPicture)(VADisplay, VAContextID, VASurfaceID) = nullptr;
-  VAStatus (*vaRenderPicture)(VADisplay, VAContextID, VABufferID*, int) = nullptr;
-  VAStatus (*vaEndPicture)(VADisplay, VAContextID) = nullptr;
-  VAStatus (*vaSyncSurface)(VADisplay, VASurfaceID) = nullptr;
-  VAStatus (*vaMapBuffer)(VADisplay, VABufferID, void**) = nullptr;
-  VAStatus (*vaUnmapBuffer)(VADisplay, VABufferID) = nullptr;
-  VAStatus (*vaDeriveImage)(VADisplay, VASurfaceID, VAImage*) = nullptr;
-  VAStatus (*vaDestroyImage)(VADisplay, VAImageID) = nullptr;
-  VAStatus (*vaCreateImage)(VADisplay, VAImageFormat*, int, int, VAImage*) = nullptr;
-  VAStatus (*vaPutImage)(VADisplay, VASurfaceID, VAImageID, int, int, unsigned int, unsigned int, int, int, unsigned int, unsigned int) = nullptr;
-  VAStatus (*vaGetConfigAttributes)(VADisplay, VAProfile, VAEntrypoint, VAConfigAttrib*, int) = nullptr;
-};
-VaapiFunctions g_vaapi_funcs;
-
-/**
- * @brief Manages the state of a VA-API H.264 encoder session.
- * This struct encapsulates all the necessary handles and configuration for a
- * VA-API encoding pipeline, including the display connection, configuration and
- * context IDs, a pool of surfaces for video frames, and initialization status.
+ * @brief Manages the state of a VA-API H.264 encoder session using libavcodec.
+ * This struct encapsulates all necessary libav objects for a VA-API hardware-
+ * accelerated encoding pipeline. This includes the hardware device context,
+ * hardware frame context for surface allocation, the codec context for the
+ * h264_vaapi encoder, and reusable frame/packet objects.
  */
 struct VaapiEncoderState {
-
-
-
-
-
-
+  AVBufferRef *hw_device_ctx = nullptr;
+  AVBufferRef *hw_frames_ctx = nullptr;
+  AVCodecContext *codec_ctx = nullptr;
+  AVFrame *sw_frame = nullptr;
+  AVFrame *hw_frame = nullptr;
+  AVPacket *packet = nullptr;
   bool initialized = false;
   int initialized_width = 0;
   int initialized_height = 0;
   int initialized_qp = -1;
+  bool initialized_is_444 = false;
   unsigned int frame_count = 0;
-  VAPictureH264 last_ref_pic;
 };
 
-
 /**
  * @brief Manages a pool of H.264 encoders and associated picture buffers.
  * This struct provides thread-safe storage and management for x264 encoder

@@ -590,97 +559,12 @@ bool LoadNvencApi(NV_ENCODE_API_FUNCTION_LIST& nvenc_funcs) {
   return true;
 }
 
-/**
- * @brief Dynamically loads the VA-API libraries and resolves required function pointers.
- *
- * This function uses `dlopen` to load `libva.so.2` (or `.1`) and its backend
- * libraries like `libva-drm.so.2`. It then uses `dlsym` to find the addresses
- * of all necessary VA-API functions and stores them in the global
- * `g_vaapi_funcs` struct. This must be called successfully before
- * any other VA-API operations.
- *
- * @return true if all libraries were loaded and functions were resolved, false otherwise.
- */
-bool LoadVaapiApi() {
-  if (g_vaapi_funcs.vaInitialize) {
-    return true;
-  }
-
-  g_vaapi_funcs.va_lib_handle = dlopen("libva.so.2", RTLD_LAZY);
-  if (!g_vaapi_funcs.va_lib_handle) {
-    g_vaapi_funcs.va_lib_handle = dlopen("libva.so.1", RTLD_LAZY);
-  }
-  if (!g_vaapi_funcs.va_lib_handle) {
-    std::cerr << "VAAPI_API_LOAD: dlopen failed for libva.so" << std::endl;
-    return false;
-  }
-
-  g_vaapi_funcs.va_drm_lib_handle = dlopen("libva-drm.so.2", RTLD_LAZY);
-  if (!g_vaapi_funcs.va_drm_lib_handle) {
-    g_vaapi_funcs.va_drm_lib_handle = dlopen("libva-drm.so.1", RTLD_LAZY);
-  }
-  if (!g_vaapi_funcs.va_drm_lib_handle) {
-    std::cerr << "VAAPI_API_LOAD: dlopen failed for libva-drm.so" << std::endl;
-    dlclose(g_vaapi_funcs.va_lib_handle);
-    g_vaapi_funcs.va_lib_handle = nullptr;
-    return false;
-  }
-
-  g_vaapi_funcs.va_x11_lib_handle = dlopen("libva-x11.so.2", RTLD_LAZY);
-  if (!g_vaapi_funcs.va_x11_lib_handle) {
-    g_vaapi_funcs.va_x11_lib_handle = dlopen("libva-x11.so.1", RTLD_LAZY);
-  }
-
-  auto unload_all_and_fail = [&]() {
-    std::cerr << "VAAPI_API_LOAD: dlsym failed for one or more functions." << std::endl;
-    if (g_vaapi_funcs.va_lib_handle) dlclose(g_vaapi_funcs.va_lib_handle);
-    if (g_vaapi_funcs.va_x11_lib_handle) dlclose(g_vaapi_funcs.va_x11_lib_handle);
-    if (g_vaapi_funcs.va_drm_lib_handle) dlclose(g_vaapi_funcs.va_drm_lib_handle);
-    g_vaapi_funcs = {};
-    return false;
-  };
-
-#define LOAD_VA_FUNC(lib, name) \
-  g_vaapi_funcs.name = (decltype(g_vaapi_funcs.name))dlsym(lib, #name); \
-  if (!g_vaapi_funcs.name) return unload_all_and_fail()
-
-  LOAD_VA_FUNC(g_vaapi_funcs.va_drm_lib_handle, vaGetDisplayDRM);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaInitialize);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaTerminate);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaQueryVendorString);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaCreateConfig);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaDestroyConfig);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaCreateSurfaces);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaDestroySurfaces);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaCreateContext);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaDestroyContext);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaCreateBuffer);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaDestroyBuffer);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaBeginPicture);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaRenderPicture);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaEndPicture);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaSyncSurface);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaMapBuffer);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaUnmapBuffer);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaDeriveImage);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaDestroyImage);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaCreateImage);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaPutImage);
-  LOAD_VA_FUNC(g_vaapi_funcs.va_lib_handle, vaGetConfigAttributes);
-
-#undef LOAD_VA_FUNC
-
-  return true;
-}
-
 /**
  * @brief Scans the system for available VA-API compatible DRM render nodes.
- *
  * This function searches the `/dev/dri/` directory for device files named
  * `renderD*`, which represent GPU render nodes that can be used for
  * hardware-accelerated computation like video encoding without needing a
  * graphical display server.
- *
 * @return A sorted vector of strings, where each string is the full path to a
 *         found render node (e.g., "/dev/dri/renderD128").
 */

@@ -703,27 +587,6 @@ std::vector<std::string> find_vaapi_render_nodes() {
   return nodes;
 }
 
-/**
- * @brief Helper function to calculate log2(N) - 4 for H.264 SPS header fields.
- *
- * The H.264 specification requires certain fields in the Sequence Parameter Set
- * (SPS), like `log2_max_frame_num_minus4`, to be encoded in this format. This
- * function computes the value, clamping it to the valid range required by the spec.
- *
- * @param num The input number (e.g., GOP size or max picture order count).
- * @return The calculated value suitable for the SPS field.
- */
-static unsigned int get_log2_val_minus4(unsigned int num) {
-  unsigned int ret = 0;
-  while (num > 0) {
-    ret++;
-    num >>= 1;
-  }
-  if (ret < 4) ret = 4;
-  if (ret > 16) ret = 16;
-  return ret - 4;
-}
-
 /**
  * @brief Callback function type for processing encoded stripes.
  * @param result Pointer to the StripeEncodeResult containing the encoded data.

@@ -905,8 +768,8 @@ private:
   bool initialize_nvenc_encoder(int width, int height, int target_qp, double fps, bool use_yuv444);
   StripeEncodeResult encode_fullframe_nvenc(int width, int height, const uint8_t* y_plane, int y_stride, const uint8_t* u_plane, int u_stride, const uint8_t* v_plane, int v_stride, bool is_i444, int frame_counter, bool force_idr_frame);
   void reset_vaapi_encoder();
-  bool initialize_vaapi_encoder(int render_node_idx, int width, int height, int qp);
-  StripeEncodeResult encode_fullframe_vaapi(int width, int height, double fps, const uint8_t* y_plane, int y_stride, const uint8_t*
+  bool initialize_vaapi_encoder(int render_node_idx, int width, int height, int qp, bool use_yuv444);
+  StripeEncodeResult encode_fullframe_vaapi(int width, int height, double fps, const uint8_t* y_plane, int y_stride, const uint8_t* u_plane, int u_stride, const uint8_t* v_plane, int v_stride, bool is_i444, int frame_counter, bool force_idr_frame);
 
   void load_watermark_image();
   void capture_loop();

@@ -1494,348 +1357,243 @@ StripeEncodeResult ScreenCaptureModule::encode_fullframe_nvenc(int width,
 }
 
 /**
- * @brief
- * This function is thread-safe. It
- *
- *
+ * @brief Releases all resources associated with the VA-API encoder session.
+ * This function is thread-safe. It frees all allocated libav objects,
+ * including the codec context, hardware device and frame contexts, and reusable
+ * frame and packet structures. It resets the state to uninitialized.
 */
 void ScreenCaptureModule::reset_vaapi_encoder() {
   std::lock_guard<std::mutex> lock(vaapi_mutex_);
   if (!vaapi_state_.initialized) {
     return;
   }
-
-
-  if (vaapi_state_.context_id != VA_INVALID_ID) {
-    funcs.vaDestroyContext(vaapi_state_.display, vaapi_state_.context_id);
+  if (vaapi_state_.codec_ctx) {
+    avcodec_free_context(&vaapi_state_.codec_ctx);
   }
-  if (vaapi_state_.
-
+  if (vaapi_state_.hw_frames_ctx) {
+    av_buffer_unref(&vaapi_state_.hw_frames_ctx);
   }
-  if (
-
+  if (vaapi_state_.hw_device_ctx) {
+    av_buffer_unref(&vaapi_state_.hw_device_ctx);
   }
-  if (vaapi_state_.
-
+  if (vaapi_state_.sw_frame) {
+    av_frame_free(&vaapi_state_.sw_frame);
   }
-  if (vaapi_state_.
-
+  if (vaapi_state_.hw_frame) {
+    av_frame_free(&vaapi_state_.hw_frame);
   }
-  if (vaapi_state_.
-
+  if (vaapi_state_.packet) {
+    av_packet_free(&vaapi_state_.packet);
   }
-
   vaapi_state_ = {};
+  if (debug_logging) {
+    std::cout << "VAAPI: Encoder resources released." << std::endl;
+  }
 }
 
 /**
- * @brief Initializes
- * This function is thread-safe. It
- *
- *
- *
+ * @brief Initializes a VA-API H.264 hardware encoder using libavcodec.
+ * This function is thread-safe. It configures and opens the 'h264_vaapi'
+ * encoder. This involves creating a VA-API hardware device context for a
+ * specific DRM render node, setting up a hardware frame context for GPU
+ * surface management, and configuring the encoder with the specified
+ * dimensions, quality (QP), and pixel format.
 * @param render_node_idx The index of the /dev/dri/renderD node to use.
 * @param width The target encoding width.
 * @param height The target encoding height.
- * @param qp The target Quantization Parameter for CQP rate control.
- * @
+ * @param qp The target Quantization Parameter for Constant QP (CQP) rate control.
+ * @param use_yuv444 If true, configures the encoder for YUV 4:4:4 input;
+ *                   otherwise, configures for YUV 4:2:0 (NV12).
+ * @return True if the encoder was successfully initialized, false otherwise.
 */
-bool ScreenCaptureModule::initialize_vaapi_encoder(int render_node_idx, int width, int height, int qp) {
+bool ScreenCaptureModule::initialize_vaapi_encoder(int render_node_idx, int width, int height, int qp, bool use_yuv444) {
   std::unique_lock<std::mutex> lock(vaapi_mutex_);
-
   if (vaapi_state_.initialized && vaapi_state_.initialized_width == width &&
-      vaapi_state_.initialized_height == height && vaapi_state_.initialized_qp == qp
+      vaapi_state_.initialized_height == height && vaapi_state_.initialized_qp == qp &&
+      vaapi_state_.initialized_is_444 == use_yuv444) {
     return true;
   }
-
   if (vaapi_state_.initialized) {
     lock.unlock();
     reset_vaapi_encoder();
     lock.lock();
   }
-
-
-
+  int ret = 0;
+  const AVCodec *codec = avcodec_find_encoder_by_name("h264_vaapi");
+  if (!codec) {
+    std::cerr << "VAAPI_INIT: Codec 'h264_vaapi' not found." << std::endl;
     return false;
   }
-
-  auto& funcs = g_vaapi_funcs;
   std::vector<std::string> nodes = find_vaapi_render_nodes();
   if (nodes.empty()) {
     std::cerr << "VAAPI_INIT: No /dev/dri/renderD nodes found." << std::endl;
     return false;
   }
-
   std::string node_to_use = (render_node_idx >= 0 && render_node_idx < (int)nodes.size()) ? nodes[render_node_idx] : nodes[0];
-
-
-  vaapi_state_.fd = open(node_to_use.c_str(), O_RDWR);
-  if (vaapi_state_.fd < 0) {
-    std::cerr << "VAAPI_INIT: Failed to open " << node_to_use << std::endl;
-    return false;
+  if (debug_logging) {
+    std::cout << "VAAPI_INIT: Using render node: " << node_to_use << std::endl;
   }
-
-
-
-    std::cerr << "VAAPI_INIT: vaGetDisplayDRM failed." << std::endl;
-    close(vaapi_state_.fd);
-    vaapi_state_.fd = -1;
+  ret = av_hwdevice_ctx_create(&vaapi_state_.hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, node_to_use.c_str(), NULL, 0);
+  if (ret < 0) {
+    std::cerr << "VAAPI_INIT: Failed to create VAAPI hardware device context: " << ret << std::endl;
     return false;
   }
-
-
-
-  if (status != VA_STATUS_SUCCESS) {
-    std::cerr << "VAAPI_INIT: vaInitialize failed: " << status << std::endl;
+  vaapi_state_.codec_ctx = avcodec_alloc_context3(codec);
+  if (!vaapi_state_.codec_ctx) {
+    std::cerr << "VAAPI_INIT: Failed to allocate codec context." << std::endl;
     return false;
   }
-
-
-
-
-
-
-
-
-
-  if (
-  (
-    std::cout << "VAAPI_INIT: Driver supports CQP rate control." << std::endl;
-    attribs.push_back({VAConfigAttribRateControl, VA_RC_CQP});
+  vaapi_state_.codec_ctx->width = width;
+  vaapi_state_.codec_ctx->height = height;
+  vaapi_state_.codec_ctx->time_base = {1, (int)target_fps};
+  vaapi_state_.codec_ctx->framerate = {(int)target_fps, 1};
+  vaapi_state_.codec_ctx->pix_fmt = AV_PIX_FMT_VAAPI;
+  vaapi_state_.codec_ctx->gop_size = INT_MAX;
+  vaapi_state_.codec_ctx->max_b_frames = 0;
+  av_opt_set(vaapi_state_.codec_ctx->priv_data, "tune", "zerolatency", 0);
+  av_opt_set(vaapi_state_.codec_ctx->priv_data, "preset", "ultrafast", 0);
+  if (use_yuv444) {
+    av_opt_set_int(vaapi_state_.codec_ctx, "profile", AV_PROFILE_H264_HIGH_444_PREDICTIVE, 0);
   } else {
-
+    av_opt_set_int(vaapi_state_.codec_ctx, "profile", AV_PROFILE_H264_HIGH, 0);
   }
-
-
-
-
-  std::
-
-  } else {
-    std::cout << "VAAPI_INIT: Driver does NOT support packed headers. Skipping attribute." << std::endl;
-  }
-
-  status = funcs.vaCreateConfig(vaapi_state_.display, va_profile, entrypoint, attribs.data(), attribs.size(), &vaapi_state_.config_id);
-  if (status != VA_STATUS_SUCCESS) {
-    std::cerr << "VAAPI_INIT: vaCreateConfig failed with Baseline profile: " << status << ". Trying VAProfileH264Main..." << std::endl;
-    va_profile = VAProfileH264Main;
-    status = funcs.vaCreateConfig(vaapi_state_.display, va_profile, entrypoint, attribs.data(), attribs.size(), &vaapi_state_.config_id);
-    if (status != VA_STATUS_SUCCESS) {
-      std::cerr << "VAAPI_INIT: vaCreateConfig failed with Main profile too: " << status << std::endl;
-      std::cerr << "VAAPI_INIT: Retrying with ONLY VAConfigAttribRTFormat..." << std::endl;
-      VAConfigAttrib minimal_attrib = {VAConfigAttribRTFormat, VA_RT_FORMAT_YUV420};
-      status = funcs.vaCreateConfig(vaapi_state_.display, va_profile, entrypoint, &minimal_attrib, 1, &vaapi_state_.config_id);
-      if (status != VA_STATUS_SUCCESS) {
-        std::cerr << "VAAPI_INIT: Failed even with minimal config. Error: " << status << std::endl;
-        return false;
-      }
-      std::cout << "VAAPI_INIT: Minimal config created successfully. Some features may be disabled." << std::endl;
-    }
+  av_opt_set(vaapi_state_.codec_ctx->priv_data, "rc_mode", "CQP", 0);
+  av_opt_set_int(vaapi_state_.codec_ctx->priv_data, "qp", qp, 0);
+  vaapi_state_.hw_frames_ctx = av_hwframe_ctx_alloc(vaapi_state_.hw_device_ctx);
+  if (!vaapi_state_.hw_frames_ctx) {
+    std::cerr << "VAAPI_INIT: Failed to create hardware frames context." << std::endl;
+    return false;
   }
-
-
-
-
-
-
+  AVHWFramesContext *frames_ctx = (AVHWFramesContext *)(vaapi_state_.hw_frames_ctx->data);
+  frames_ctx->format = AV_PIX_FMT_VAAPI;
+  frames_ctx->sw_format = use_yuv444 ? AV_PIX_FMT_YUV444P : AV_PIX_FMT_NV12;
+  frames_ctx->width = width;
+  frames_ctx->height = height;
+  frames_ctx->initial_pool_size = 20;
+  ret = av_hwframe_ctx_init(vaapi_state_.hw_frames_ctx);
+  if (ret < 0) {
+    std::cerr << "VAAPI_INIT: Failed to initialize hardware frames context: " << ret << std::endl;
     return false;
   }
-
-
-
-
+  vaapi_state_.codec_ctx->hw_frames_ctx = av_buffer_ref(vaapi_state_.hw_frames_ctx);
+  if (!vaapi_state_.codec_ctx->hw_frames_ctx) {
+    std::cerr << "VAAPI_INIT: Failed to link hardware frames context." << std::endl;
+    return false;
+  }
+  ret = avcodec_open2(vaapi_state_.codec_ctx, codec, NULL);
+  if (ret < 0) {
+    std::cerr << "VAAPI_INIT: Failed to open codec: " << ret << std::endl;
+    return false;
+  }
+  vaapi_state_.sw_frame = av_frame_alloc();
+  vaapi_state_.hw_frame = av_frame_alloc();
+  vaapi_state_.packet = av_packet_alloc();
+  if (!vaapi_state_.sw_frame || !vaapi_state_.hw_frame || !vaapi_state_.packet) {
+    std::cerr << "VAAPI_INIT: Failed to allocate reusable frame/packet objects." << std::endl;
     return false;
   }
-
   vaapi_state_.initialized = true;
   vaapi_state_.initialized_width = width;
   vaapi_state_.initialized_height = height;
   vaapi_state_.initialized_qp = qp;
+  vaapi_state_.initialized_is_444 = use_yuv444;
   vaapi_state_.frame_count = 0;
-
+  if (debug_logging) {
+    std::cout << "VAAPI_INIT: Encoder initialized successfully via FFmpeg for "
+              << width << "x" << height << " " << (use_yuv444 ? "YUV444P" : "NV12")
+              << " with QP " << qp << "." << std::endl;
+  }
   return true;
 }
 
 /**
- * @brief Encodes a full
- * This function is thread-safe. It
- *
- * and
- *
- * @param width The width of the frame.
- * @param height The height of the frame.
- * @param fps The target frames per second
- * @param y_plane Pointer to the Y plane data.
- * @param y_stride Stride
- * @param
- *
- * @param
- * @param
- *
- * @
+ * @brief Encodes a full YUV frame using the initialized VA-API session.
+ * This function is thread-safe. It takes YUV plane data, transfers it from
+ * system memory to a hardware surface on the GPU, submits it to the encoder,
+ * and retrieves the resulting H.264 bitstream packet. The encoded data is
+ * packaged into a StripeEncodeResult with a prepended 10-byte custom header.
+ * @param width The width of the input frame.
+ * @param height The height of the input frame.
+ * @param fps The target frames per second (used for PTS calculation).
+ * @param y_plane Pointer to the start of the Y plane data.
+ * @param y_stride Stride in bytes for the Y plane.
+ * @param u_plane Pointer to the start of the U plane (for I444) or interleaved
+ *                UV plane (for NV12).
+ * @param u_stride Stride in bytes for the U or UV plane.
+ * @param v_plane Pointer to the start of the V plane (for I444); should be
+ *                nullptr for NV12.
+ * @param v_stride Stride in bytes for the V plane.
+ * @param is_i444 True if the input format is YUV444P, false for NV12.
+ * @param frame_counter The unique identifier for the current frame.
+ * @param force_idr_frame If true, flags the frame as a keyframe (IDR).
+ * @return A StripeEncodeResult containing the encoded H.264 data. On failure
+ *         or if no packet is output, the result may be empty.
+ * @throws std::runtime_error if a critical libav API call fails.
 */
 StripeEncodeResult ScreenCaptureModule::encode_fullframe_vaapi(int width, int height, double fps,
     const uint8_t* y_plane, int y_stride,
-    const uint8_t*
+    const uint8_t* u_plane, int u_stride,
+    const uint8_t* v_plane, int v_stride,
+    bool is_i444,
     int frame_counter,
     bool force_idr_frame) {
-  StripeEncodeResult result;
-  result.type = StripeDataType::H264;
-  result.stripe_y_start = 0;
-  result.stripe_height = height;
-  result.frame_id = frame_counter;
-
   std::lock_guard<std::mutex> lock(vaapi_mutex_);
   if (!vaapi_state_.initialized) {
     throw std::runtime_error("VAAPI_ENCODE_FATAL: Not initialized.");
   }
-
-
-
-
-
-  if (
-    vaapi_state_.
+  int ret = av_hwframe_get_buffer(vaapi_state_.hw_frames_ctx, vaapi_state_.hw_frame, 0);
+  if (ret < 0) {
+    throw std::runtime_error("VAAPI_ENCODE_ERROR: Failed to get hardware frame from pool: " + std::to_string(ret));
+  }
+  AVFrame *tmp_sw_frame = av_frame_alloc();
+  if (!tmp_sw_frame) {
+    av_frame_unref(vaapi_state_.hw_frame);
+    throw std::runtime_error("VAAPI_ENCODE_ERROR: Failed to allocate temporary mapping frame.");
+  }
+  ret = av_hwframe_map(tmp_sw_frame, vaapi_state_.hw_frame, AV_HWFRAME_MAP_WRITE);
+  if (ret < 0) {
+    av_frame_free(&tmp_sw_frame);
+    av_frame_unref(vaapi_state_.hw_frame);
+    throw std::runtime_error("VAAPI_ENCODE_ERROR: Failed to map hardware frame for writing: " + std::to_string(ret));
+  }
+  if (is_i444) {
+    libyuv::CopyPlane(y_plane, y_stride, tmp_sw_frame->data[0], tmp_sw_frame->linesize[0], width, height);
+    libyuv::CopyPlane(u_plane, u_stride, tmp_sw_frame->data[1], tmp_sw_frame->linesize[1], width, height);
+    libyuv::CopyPlane(v_plane, v_stride, tmp_sw_frame->data[2], tmp_sw_frame->linesize[2], width, height);
+  } else {
+    libyuv::CopyPlane(y_plane, y_stride, tmp_sw_frame->data[0], tmp_sw_frame->linesize[0], width, height);
+    libyuv::CopyPlane(u_plane, u_stride, tmp_sw_frame->data[1], tmp_sw_frame->linesize[1], width, height / 2);
+  }
+  av_frame_unref(tmp_sw_frame);
+  av_frame_free(&tmp_sw_frame);
+  vaapi_state_.hw_frame->pts = vaapi_state_.frame_count++;
+  if (force_idr_frame) {
+    vaapi_state_.hw_frame->pict_type = AV_PICTURE_TYPE_I;
+  } else {
+    vaapi_state_.hw_frame->pict_type = AV_PICTURE_TYPE_NONE;
   }
-
-
-
-
-    image.width = width;
-    image.height = height;
-
-    status = funcs.vaCreateImage(vaapi_state_.display, &image.format, width, height, &image);
-    if (status != VA_STATUS_SUCCESS) throw std::runtime_error("vaCreateImage failed: " + std::to_string(status));
-
-    void *image_ptr = nullptr;
-    status = funcs.vaMapBuffer(vaapi_state_.display, image.buf, &image_ptr);
-    if (status != VA_STATUS_SUCCESS) {
-      funcs.vaDestroyImage(vaapi_state_.display, image.image_id);
-      throw std::runtime_error("vaMapBuffer for VAImage failed: " + std::to_string(status));
-    }
-
-    uint8_t* y_dest = (uint8_t*)image_ptr + image.offsets[0];
-    uint8_t* uv_dest = (uint8_t*)image_ptr + image.offsets[1];
-    libyuv::CopyPlane(y_plane, y_stride, y_dest, image.pitches[0], width, height);
-    libyuv::CopyPlane(uv_plane, uv_stride, uv_dest, image.pitches[1], width, height / 2);
-
-    funcs.vaUnmapBuffer(vaapi_state_.display, image.buf);
-    status = funcs.vaPutImage(vaapi_state_.display, current_surface, image.image_id,
-                              0, 0, width, height, 0, 0, width, height);
-    funcs.vaDestroyImage(vaapi_state_.display, image.image_id);
-    if (status != VA_STATUS_SUCCESS) throw std::runtime_error("vaPutImage failed: " + std::to_string(status));
-  }
-
-  if (vaapi_state_.coded_buffer_id == VA_INVALID_ID) {
-    status = funcs.vaCreateBuffer(vaapi_state_.display, vaapi_state_.context_id, VAEncCodedBufferType,
-                                  width * height * 3 / 2, 1, nullptr, &vaapi_state_.coded_buffer_id);
-    if (status != VA_STATUS_SUCCESS) throw std::runtime_error("vaCreateBuffer for coded buffer failed.");
-  }
-
-  std::vector<VABufferID> param_buffers;
-  try {
-    if (force_idr_frame) {
-      VAEncSequenceParameterBufferH264 sps = {};
-      const unsigned int gop_size = 30;
-      const unsigned int max_ref_frames_in_gop = 1;
-
-      sps.seq_parameter_set_id = 0;
-      sps.level_idc = 41;
-      sps.intra_idr_period = gop_size;
-      sps.intra_period = gop_size;
-      sps.ip_period = 1;
-      sps.bits_per_second = 0;
-      sps.max_num_ref_frames = max_ref_frames_in_gop;
-      sps.picture_width_in_mbs = (width + 15) / 16;
-      sps.picture_height_in_mbs = (height + 15) / 16;
-      sps.seq_fields.bits.chroma_format_idc = 1;
-      sps.seq_fields.bits.frame_mbs_only_flag = 1;
-      sps.seq_fields.bits.direct_8x8_inference_flag = 1;
-      sps.seq_fields.bits.pic_order_cnt_type = 0;
-      unsigned int log2_max_frame_num_val = get_log2_val_minus4(gop_size);
-      sps.seq_fields.bits.log2_max_frame_num_minus4 = log2_max_frame_num_val;
-      unsigned int poc_val = 1 << (log2_max_frame_num_val + 4 + 1);
-      sps.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = get_log2_val_minus4(poc_val);
-      sps.vui_parameters_present_flag = 1;
-      sps.vui_fields.bits.timing_info_present_flag = 1;
-      sps.vui_fields.bits.fixed_frame_rate_flag = 1;
-      sps.vui_fields.bits.bitstream_restriction_flag = 1;
-      sps.vui_fields.bits.motion_vectors_over_pic_boundaries_flag = 1;
-      sps.vui_fields.bits.aspect_ratio_info_present_flag = 1;
-      sps.aspect_ratio_idc = 255;
-      sps.sar_width = 1;
-      sps.sar_height = 1;
-      sps.num_units_in_tick = 1;
-      sps.time_scale = static_cast<unsigned int>(fps * 2);
-
-      VABufferID buf_id;
-      if (funcs.vaCreateBuffer(vaapi_state_.display, vaapi_state_.context_id, VAEncSequenceParameterBufferType, sizeof(sps), 1, &sps, &buf_id) != VA_STATUS_SUCCESS) throw std::runtime_error("vaCreateBuffer for SPS failed.");
-      param_buffers.push_back(buf_id);
-    }
-
-    {
-      VAEncPictureParameterBufferH264 pps = {};
-      VAPictureH264 current_va_picture = {current_surface, vaapi_state_.frame_count, 0, static_cast<int32_t>(vaapi_state_.frame_count * 2), static_cast<int32_t>(vaapi_state_.frame_count * 2)};
-      pps.CurrPic = current_va_picture;
-      for (int i = 0; i < 16; ++i) pps.ReferenceFrames[i] = {VA_INVALID_ID, VA_PICTURE_H264_INVALID, 0, 0, 0};
-      if (!force_idr_frame) pps.ReferenceFrames[0] = vaapi_state_.last_ref_pic;
-      pps.coded_buf = vaapi_state_.coded_buffer_id;
-      pps.frame_num = vaapi_state_.frame_count;
-      pps.pic_init_qp = vaapi_state_.initialized_qp;
-      pps.pic_fields.bits.idr_pic_flag = force_idr_frame ? 1 : 0;
-      pps.pic_fields.bits.reference_pic_flag = 1;
-      pps.pic_fields.bits.entropy_coding_mode_flag = 1;
-      pps.pic_fields.bits.deblocking_filter_control_present_flag = 1;
-      pps.pic_fields.bits.transform_8x8_mode_flag = 1;
-      VABufferID buf_id;
-      if (funcs.vaCreateBuffer(vaapi_state_.display, vaapi_state_.context_id, VAEncPictureParameterBufferType, sizeof(pps), 1, &pps, &buf_id) != VA_STATUS_SUCCESS) throw std::runtime_error("vaCreateBuffer for PPS failed.");
-      param_buffers.push_back(buf_id);
-    }
-
-    {
-      VAEncSliceParameterBufferH264 slice = {};
-      slice.slice_type = force_idr_frame ? 2 : 0;
-      slice.num_macroblocks = ((width + 15) / 16) * ((height + 15) / 16);
-      slice.pic_order_cnt_lsb = (vaapi_state_.frame_count * 2);
-      for (int i = 0; i < 32; ++i) slice.RefPicList0[i] = slice.RefPicList1[i] = {VA_INVALID_ID, VA_PICTURE_H264_INVALID, 0, 0, 0};
-      if (!force_idr_frame) slice.RefPicList0[0] = vaapi_state_.last_ref_pic;
-      VABufferID buf_id;
-      if (funcs.vaCreateBuffer(vaapi_state_.display, vaapi_state_.context_id, VAEncSliceParameterBufferType, sizeof(slice), 1, &slice, &buf_id) != VA_STATUS_SUCCESS) throw std::runtime_error("vaCreateBuffer for Slice failed.");
-      param_buffers.push_back(buf_id);
-    }
-  } catch (const std::runtime_error& e) {
-    for (VABufferID buf_id : param_buffers) funcs.vaDestroyBuffer(vaapi_state_.display, buf_id);
-    throw;
+  ret = avcodec_send_frame(vaapi_state_.codec_ctx, vaapi_state_.hw_frame);
+  av_frame_unref(vaapi_state_.hw_frame);
+  if (ret < 0) {
+    throw std::runtime_error("VAAPI_ENCODE_ERROR: Failed to send frame to encoder: " + std::to_string(ret));
   }
-
-
-
-
-
-  if (status != VA_STATUS_SUCCESS) throw std::runtime_error("vaRenderPicture failed: " + std::to_string(status));
-
-  status = funcs.vaEndPicture(vaapi_state_.display, vaapi_state_.context_id);
-  if (status != VA_STATUS_SUCCESS) {
-    for(VABufferID buf_id : param_buffers) funcs.vaDestroyBuffer(vaapi_state_.display, buf_id);
-    throw std::runtime_error("vaEndPicture failed: " + std::to_string(status));
+  ret = avcodec_receive_packet(vaapi_state_.codec_ctx, vaapi_state_.packet);
+  if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
+    return {};
+  } else if (ret < 0) {
+    throw std::runtime_error("VAAPI_ENCODE_ERROR: Failed to receive packet from encoder: " + std::to_string(ret));
   }
-
-
-
-
-
-
-  VACodedBufferSegment* coded_segment = nullptr;
-  status = funcs.vaMapBuffer(vaapi_state_.display, vaapi_state_.coded_buffer_id, (void**)&coded_segment);
-  if (status != VA_STATUS_SUCCESS) throw std::runtime_error("vaMapBuffer for coded data failed: " + std::to_string(status));
-
-  if (coded_segment && coded_segment->size > 0 && coded_segment->buf) {
+  StripeEncodeResult result;
+  result.type = StripeDataType::H264;
+  result.stripe_y_start = 0;
+  result.stripe_height = height;
+  result.frame_id = frame_counter;
+  if (vaapi_state_.packet->size > 0) {
     const unsigned char TAG = 0x04;
-    unsigned char type_hdr = (
+    unsigned char type_hdr = (vaapi_state_.packet->flags & AV_PKT_FLAG_KEY) ? 0x01 : 0x00;
     int header_sz = 10;
-    result.data = new unsigned char[
-    result.size =
-
+    result.data = new unsigned char[vaapi_state_.packet->size + header_sz];
+    result.size = vaapi_state_.packet->size + header_sz;
     result.data[0] = TAG;
     result.data[1] = type_hdr;
     uint16_t net_val = htons(static_cast<uint16_t>(result.frame_id % 65536));

@@ -1846,14 +1604,9 @@ StripeEncodeResult ScreenCaptureModule::encode_fullframe_vaapi(int width, int he
     std::memcpy(result.data + 6, &net_val, 2);
     net_val = htons(static_cast<uint16_t>(height));
     std::memcpy(result.data + 8, &net_val, 2);
-    std::memcpy(result.data + header_sz,
+    std::memcpy(result.data + header_sz, vaapi_state_.packet->data, vaapi_state_.packet->size);
   }
-
-  funcs.vaUnmapBuffer(vaapi_state_.display, vaapi_state_.coded_buffer_id);
-
-  vaapi_state_.last_ref_pic = {current_surface, vaapi_state_.frame_count, VA_PICTURE_H264_SHORT_TERM_REFERENCE, 0, 0};
-  vaapi_state_.frame_count++;
-
+  av_packet_unref(vaapi_state_.packet);
   return result;
 }
 

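For context on the packet flow the rewritten encode_fullframe_vaapi relies on, a minimal per-frame step with libavcodec looks roughly like the sketch below. This is a generic, hedged illustration rather than the package's code: it handles only the NV12 case for brevity (the package also uploads YUV444P), and the sink callback plus the caller-supplied Y/UV pointers are assumptions introduced for the example. It assumes the same libavcodec/libavutil headers as the earlier sketch and an encoder/frames pool set up the same way.

// Minimal per-frame sketch (not pixelflux code): upload one NV12 frame into a
// VA-API surface, encode it, and hand the H.264 packet to a caller-supplied sink.
#include <cstdint>
#include <cstring>
#include <functional>

static void encode_one_frame(AVCodecContext* enc, AVBufferRef* frames,
                             const uint8_t* y, int y_stride,
                             const uint8_t* uv, int uv_stride,
                             int width, int height, int64_t pts,
                             const std::function<void(const AVPacket*)>& sink) {
  AVFrame* hw = av_frame_alloc();
  AVFrame* sw = av_frame_alloc();
  AVPacket* pkt = av_packet_alloc();

  av_hwframe_get_buffer(frames, hw, 0);           // take a surface from the pool
  av_hwframe_map(sw, hw, AV_HWFRAME_MAP_WRITE);   // expose it as writable planes
  for (int row = 0; row < height; ++row)          // copy the Y plane row by row
    memcpy(sw->data[0] + row * sw->linesize[0], y + row * y_stride, width);
  for (int row = 0; row < height / 2; ++row)      // copy the interleaved UV plane
    memcpy(sw->data[1] + row * sw->linesize[1], uv + row * uv_stride, width);
  av_frame_unref(sw);                             // unreffing the mapped frame unmaps it

  hw->pts = pts;
  if (avcodec_send_frame(enc, hw) == 0 &&
      avcodec_receive_packet(enc, pkt) == 0) {    // EAGAIN here just means "buffered"
    sink(pkt);                                    // pkt->flags & AV_PKT_FLAG_KEY marks keyframes
    av_packet_unref(pkt);
  }
  av_frame_unref(hw);
  av_frame_free(&hw);
  av_frame_free(&sw);
  av_packet_free(&pkt);
}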
@@ -2003,7 +1756,6 @@ void ScreenCaptureModule::overlay_image(int image_height, int image_width, const
  * the encoded results and invokes the user-provided callback.
  */
 void ScreenCaptureModule::capture_loop() {
-  static bool vaapi_444_warning_shown = false;
   auto start_time_loop = std::chrono::high_resolution_clock::now();
   int frame_count_loop = 0;
 

@@ -2219,7 +1971,7 @@ void ScreenCaptureModule::capture_loop() {
     if (!local_use_cpu && local_vaapi_render_node_index >= 0 &&
         local_current_output_mode == OutputMode::H264 && local_current_h264_fullframe) {
       if (this->initialize_vaapi_encoder(local_vaapi_render_node_index, local_capture_width_actual,
-                                         local_capture_height_actual, local_current_h264_crf)) {
+                                         local_capture_height_actual, local_current_h264_crf, local_current_h264_fullcolor)) {
         this->vaapi_operational = true;
         this->vaapi_force_next_idr_ = true;
         std::cout << "VAAPI Encoder Initialized successfully." << std::endl;

@@ -2635,24 +2387,15 @@ void ScreenCaptureModule::capture_loop() {
       }
     }
 
-
     if (local_current_output_mode == OutputMode::H264) {
-      bool
-      if (force_420_conversion && !vaapi_444_warning_shown) {
-        std::cerr << "VAAPI_WARNING: 4:4:4 colorspace is not supported in VAAPI mode. "
-                     "Forcing 4:2:0 conversion for encoder."
-                  << std::endl;
-        vaapi_444_warning_shown = true;
-      }
+      bool use_nv12_for_hw_encoder = (this->nvenc_operational || this->vaapi_operational) && !this->yuv_planes_are_i444_;
 
-
-
-      if (use_nv12_direct_path) {
+      if (use_nv12_for_hw_encoder) {
         libyuv::ARGBToNV12(shm_data_ptr, shm_stride_bytes,
                            full_frame_y_plane_.data(), full_frame_y_stride_,
                            full_frame_u_plane_.data(), full_frame_u_stride_,
                            local_capture_width_actual, local_capture_height_actual);
-      } else if (this->yuv_planes_are_i444_
+      } else if (this->yuv_planes_are_i444_) {
         libyuv::ARGBToI444(shm_data_ptr, shm_stride_bytes,
                            full_frame_y_plane_.data(), full_frame_y_stride_,
                            full_frame_u_plane_.data(), full_frame_u_stride_,

@@ -2967,6 +2710,9 @@ void ScreenCaptureModule::capture_loop() {
             local_capture_width_actual, local_capture_height_actual, local_current_target_fps,
             full_frame_y_plane_.data(), full_frame_y_stride_,
             full_frame_u_plane_.data(), full_frame_u_stride_,
+            this->yuv_planes_are_i444_ ? full_frame_v_plane_.data() : nullptr,
+            this->yuv_planes_are_i444_ ? full_frame_v_stride_ : 0,
+            this->yuv_planes_are_i444_,
             this->frame_counter, force_idr
         );
       });

@@ -3424,18 +3170,18 @@ StripeEncodeResult encode_stripe_h264(
   param.i_bframe = 0;
   param.i_threads = h264_streaming_mode ? 0 : 1;
   param.i_log_level = X264_LOG_ERROR;
-  param.vui.b_fullrange =
+  param.vui.b_fullrange = 0;
   param.vui.i_sar_width = 1;
   param.vui.i_sar_height = 1;
   if (param.i_csp == X264_CSP_I444) {
     param.vui.i_colorprim = 1;
     param.vui.i_transfer = 1;
-    param.vui.i_colmatrix =
+    param.vui.i_colmatrix = 6;
     x264_param_apply_profile(&param, "high444");
   } else {
     param.vui.i_colorprim = 1;
     param.vui.i_transfer = 1;
-    param.vui.i_colmatrix =
+    param.vui.i_colmatrix = 6;
     x264_param_apply_profile(&param, "baseline");
   }
   param.b_aud = 0;

{pixelflux-1.4.1 → pixelflux-1.4.3/pixelflux.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pixelflux
-Version: 1.4.1
+Version: 1.4.3
 Summary: A performant web native pixel delivery pipeline for diverse sources, blending VNC-inspired parallel processing of pixel buffers with flexible modern encoding formats.
 Home-page: https://github.com/linuxserver/pixelflux
 Author: Linuxserver.io

{pixelflux-1.4.1 → pixelflux-1.4.3}/pyproject.toml

@@ -5,7 +5,6 @@ build-backend = "setuptools.build_meta"
 [tool.cibuildwheel]
 archs = [ "x86_64", "aarch64" ]
 
-# Add this new section
 [tool.cibuildwheel.environment]
 LD_LIBRARY_PATH = "/usr/local/lib:/usr/local/lib64"
 

@@ -57,8 +56,22 @@ before-all = """
     make -j$(nproc) && \
     make install) && \
     \
+    (cd /tmp && \
+    git clone --branch n8.0 --depth 1 https://git.ffmpeg.org/ffmpeg.git && \
+    cd ffmpeg && \
+    ./configure \
+      --prefix=/usr/local \
+      --enable-shared \
+      --disable-static \
+      --disable-programs \
+      --enable-gpl \
+      --enable-libx264 \
+      --extra-cflags="-I/usr/local/include" \
+      --extra-ldflags="-L/usr/local/lib" && \
+    make -j$(nproc) && \
+    make install) && \
+    \
     ldconfig
-
   elif command -v apk; then
     apk add --no-cache \
       libx11-dev \

@@ -72,9 +85,17 @@ before-all = """
       libxcomposite-dev \
       libva-dev \
       libdrm-dev \
-      musl-dev
+      musl-dev \
+      ffmpeg-dev
   else
     echo "Unsupported package manager"
     exit 1
   fi
 """
+repair-wheel-command = """
+auditwheel repair -w {dest_dir} {wheel} \
+    --exclude libva.so.2 \
+    --exclude libva-drm.so.2 \
+    --exclude libva-x11.so.2 \
+    --exclude libdrm.so.2
+"""

{pixelflux-1.4.1 → pixelflux-1.4.3}/setup.py

@@ -10,34 +10,32 @@ from setuptools.command.build_ext import build_ext
 class BuildCtypesExt(build_ext):
     def build_extensions(self):
         compiler = self.compiler.compiler_cxx[0]
-
         lib_dir = Path(self.build_lib)
-
         output_path = lib_dir / "pixelflux" / "screen_capture_module.so"
-
         output_path.parent.mkdir(parents=True, exist_ok=True)
-
         sources = [
             'pixelflux/screen_capture_module.cpp',
             'pixelflux/include/xxhash.c'
         ]
         include_dirs = ['pixelflux/include']
-
+        library_dirs = []
+        libraries = ['X11', 'Xext', 'Xfixes', 'jpeg', 'x264', 'yuv', 'dl', 'avcodec', 'avutil']
         extra_compile_args = ['-std=c++17', '-Wno-unused-function', '-fPIC', '-O3', '-shared']
-
+        if os.environ.get("CIBUILDWHEEL"):
+            print("CIBUILDWHEEL environment detected. Adding /usr/local paths.")
+            include_dirs.append('/usr/local/include')
+            library_dirs.append('/usr/local/lib')
         command = [compiler]
         command.extend(extra_compile_args)
         command.append('-o')
         command.append(str(output_path))
-
         for include_dir in include_dirs:
             command.append(f'-I{include_dir}')
-
+        for lib_dir_path in library_dirs:
+            command.append(f'-L{lib_dir_path}')
         command.extend(sources)
-
         for lib in libraries:
             command.append(f'-l{lib}')
-
         print("Running build command:")
         print(" ".join(command))
         try:

@@ -45,15 +43,12 @@ class BuildCtypesExt(build_ext):
         except subprocess.CalledProcessError as e:
             print(f"Build failed with exit code {e.returncode}", file=sys.stderr)
             sys.exit(1)
-
         print(f"Successfully built {output_path}")
-
 with open("README.md", "r", encoding="utf-8") as fh:
     long_description = fh.read()
-
 setup(
     name="pixelflux",
-    version="1.4.1",
+    version="1.4.3",
     author="Linuxserver.io",
     author_email="pypi@linuxserver.io",
     description="A performant web native pixel delivery pipeline for diverse sources, blending VNC-inspired parallel processing of pixel buffers with flexible modern encoding formats.",